diff --git a/.coveragerc b/.coveragerc
index 013b0fd4..5c0ca9b7 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,6 +1,21 @@
[run]
+source_pkgs =
+ pyspedas
+ pytplot
omit =
- /home/travis/virtualenv/*
- */site-packages/*
*/bin/*
*/docs/*
+ */QtPlotter/*
+ */pyspedas/utilities/data_exists.py
+ */pyspedas/utilities/time_double.py
+ */pyspedas/utilities/time_string.py
+ */pyspedas/utilities/tkm2re.py
+ */pyspedas/utilities/tnames.py
+ */pyspedas/analysis/clean_spikes.py
+ */pyspedas/analysis/subtract_average.py
+ */pyspedas/analysis/subtract_median.py
+ */pyspedas/analysis/tcross.py
+ */pyspedas/analysis/tdotp.py
+ */pyspedas/analysis/time_clip.py
+ */pyspedas/analysis/tnormalize.py
+ */pyspedas/analysis/tsmooth.py
diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index e96cc2dc..d8634fda 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -10,10 +10,10 @@ jobs:
matrix:
#python-version: [3.5, 3.6, 3.7, 3.8]
#os: [ubuntu-latest, macos-latest, windows-latest]
- os: [macos-latest]
+ os: [ubuntu-latest]
#os: [ubuntu-latest]
#os: [windows-latest]
- python-version: [3.9]
+ python-version: [3.8]
steps:
- uses: actions/checkout@v2
@@ -21,26 +21,41 @@ jobs:
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- if: github.ref == 'refs/heads/master'
+ if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/eric-superfast' || github.ref == 'refs/heads/themis'
- name: Install dependencies (Linux)
+ env:
+ MMS_AUTH_U: ${{ secrets.MMS_AUTH_U }}
+ MMS_AUTH_P: ${{ secrets.MMS_AUTH_P }}
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
+ pip install spacepy # required for MMS qcotrans tests
pip install coveralls
+ pip install basemap
python -m pyspedas.mms.tests.setup_tests
- if: matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/master'
+ if: matrix.os == 'ubuntu-latest' && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/eric-superfast' || github.ref == 'refs/heads/themis')
- name: Install dependencies (Windows)
+ env:
+ MMS_AUTH_U: ${{ secrets.MMS_AUTH_U }}
+ MMS_AUTH_P: ${{ secrets.MMS_AUTH_P }}
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
+ pip install spacepy # required for MMS qcotrans tests
pip install coveralls
+ pip install basemap
python -m pyspedas.mms.tests.setup_tests
if: matrix.os == 'windows-latest' && github.ref == 'refs/heads/master'
- name: Install dependencies (macOS)
+ env:
+ MMS_AUTH_U: ${{ secrets.MMS_AUTH_U }}
+ MMS_AUTH_P: ${{ secrets.MMS_AUTH_P }}
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
+ pip install spacepy # required for MMS qcotrans tests
pip install coveralls
+ pip install basemap
python -m pyspedas.mms.tests.setup_tests
if: matrix.os == 'macos-latest' && github.ref == 'refs/heads/master'
- name: Lint with flake8
@@ -48,27 +63,75 @@ jobs:
run: |
pip install flake8
# stop the build if there are Python syntax errors or undefined names
- flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+ flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --exclude 'erg'
# exit-zero treats all errors as warnings.
- flake8 . --count --exit-zero --max-complexity=10 --statistics # --max-line-length=127
+ flake8 . --count --exit-zero --max-complexity=10 --statistics --exclude 'erg' # --max-line-length=127
- name: Test with unittest
- if: github.ref == 'refs/heads/master'
+ if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/eric-superfast' || github.ref == 'refs/heads/themis'
env:
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
+ SPEDAS_DATA_DIR: data
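+ # the mission-specific *_DATA_DIR variables below take precedence over SPEDAS_DATA_DIR for that mission's local cache (see each mission's config.py)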
+ ACE_DATA_DIR: ace_data/
+ BARREL_DATA_DIR: barrel_data/
+ CLUSTER_DATA_DIR: cluster_data/
+ DSC_DATA_DIR: dsc_data/
+ CSSWE_DATA_DIR: csswe_data/
+ EQUATORS_DATA_DIR: eqs_data/
+ FAST_DATA_DIR: fast_data/
+ GEOTAIL_DATA_DIR: geotail_data/
+ GOES_DATA_DIR: goes_data/
+ IMAGE_DATA_DIR: img_data/
+ MICA_DATA_DIR: mica_data/
+ MMS_DATA_DIR: mms_data/
+ OMNI_DATA_DIR: omni_data/
+ POES_DATA_DIR: poes_data/
+ POLAR_DATA_DIR: polar_data/
+ PSP_DATA_DIR: psp_data/
+ RBSP_DATA_DIR: rbsp_data/
+ SOLO_DATA_DIR: solo_data/
+ STEREO_DATA_DIR: stereo_data/
+ THM_DATA_DIR: themis_data/
+ TWINS_DATA_DIR: twins_data/
+ ULY_DATA_DIR: uly_data/
+ WIND_DATA_DIR: wind_data/
+ LANL_DATA_DIR: lanl_data/
+ CNOFS_DATA_DIR: cnofs_data/
+ ST5_DATA_DIR: st5_data/
+ PYTPLOT_LOGGING_LEVEL: error
run: |
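+ # "coverage run -a" appends, so results from every test module below accumulate into a single report for coveralls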
+ # coverage run -a -m pyspedas.akebono.tests.tests
+ coverage run -a -m pyspedas.barrel.tests.tests
+ coverage run -a -m pyspedas.soho.tests.tests
+ coverage run -a -m pyspedas.de2.tests.tests
+ coverage run -a -m pyspedas.st5.tests.tests
+ coverage run -a -m pyspedas.lanl.tests.tests
+ coverage run -a -m pyspedas.cnofs.tests.tests
+ coverage run -a -m pyspedas.secs.tests.tests
+ coverage run -a -m pyspedas.sosmag.tests.tests
+ coverage run -a -m pyspedas.hapi.tests.tests
+ coverage run -a -m pyspedas.mms.tests.cotrans
+ coverage run -a -m pyspedas.mms.tests.events
+ coverage run -a -m pyspedas.mms.tests.orbit_plots
+ coverage run -a -m pyspedas.mms.tests.neutral_sheet
+ coverage run -a -m pyspedas.mms.tests.ql_l1b_sitl_tests
+ coverage run -a -m pyspedas.mms.tests.mms_part_getspec
coverage run -a -m pyspedas.mms.tests.load_routine_tests
coverage run -a -m pyspedas.mms.tests.feeps
- # coverage run -a -m pyspedas.mms.tests.eis
+ coverage run -a -m pyspedas.mms.tests.eis
coverage run -a -m pyspedas.mms.tests.fpi_tests
coverage run -a -m pyspedas.mms.tests.file_filter
coverage run -a -m pyspedas.mms.tests.data_rate_segments
coverage run -a -m pyspedas.mms.tests.curlometer
coverage run -a -m pyspedas.mms.tests.wavpol
+ coverage run -a -m pyspedas.mms.tests.slice2d
coverage run -a -m pyspedas.dscovr.tests.tests
coverage run -a -m pyspedas.utilities.tests.download_tests
coverage run -a -m pyspedas.utilities.tests.misc_tests
coverage run -a -m pyspedas.utilities.tests.time_tests
coverage run -a -m pyspedas.cotrans.tests.cotrans
+ coverage run -a -m pyspedas.cotrans.tests.quaternions
+ coverage run -a -m pyspedas.cotrans.tests.test_minvar
coverage run -a -m pyspedas.cluster.tests.tests
coverage run -a -m pyspedas.csswe.tests.tests
coverage run -a -m pyspedas.ace.tests.tests
@@ -81,8 +144,10 @@ jobs:
coverage run -a -m pyspedas.wind.tests.tests
coverage run -a -m pyspedas.poes.tests.tests
coverage run -a -m pyspedas.polar.tests.tests
+ coverage run -a -m pyspedas.geopack.tests.tests
coverage run -a -m pyspedas.geotail.tests.tests
coverage run -a -m pyspedas.analysis.tests.tests
+ coverage run -a -m pyspedas.analysis.tests.test_twavpol
coverage run -a -m pyspedas.fast.tests.tests
coverage run -a -m pyspedas.omni.tests.tests
coverage run -a -m pyspedas.themis.tests.tests
@@ -95,4 +160,10 @@ jobs:
coverage run -a -m pyspedas.kyoto.tests.tests
coverage run -a -m pyspedas.swarm.tests.tests
coverage run -a -m pyspedas.themis.tests.tests_themis_check_args
+ coverage run -a -m pyspedas.themis.tests.tests_cal_fit
+ coverage run -a -m pyspedas.themis.tests.tests_dsl_cotrans
+ coverage run -a -m pyspedas.themis.tests.tests_lunar_cotrans
+ coverage run -a -m pyspedas.themis.tests.tests_spinmodel
+ coverage run -a -m pyspedas.themis.tests.tests_state
+ coverage run -a -m pyspedas.themis.tests.test_cal_fit_tplot_metadata
coveralls
diff --git a/README.md b/README.md
index d2039e07..8648b003 100644
--- a/README.md
+++ b/README.md
@@ -3,31 +3,27 @@
[![build](https://github.com/spedas/pyspedas/workflows/build/badge.svg)](https://github.com/spedas/pyspedas/actions)
[![Coverage Status](https://coveralls.io/repos/github/spedas/pyspedas/badge.svg)](https://coveralls.io/github/spedas/pyspedas)
[![Version](https://img.shields.io/pypi/v/pyspedas.svg)](https://pypi.org/project/pyspedas/)
-[![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/spedas/pyspedas.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/spedas/pyspedas/context:python)
![Status](https://img.shields.io/pypi/status/pyspedas.svg)
![License](https://img.shields.io/pypi/l/pyspedas.svg)
-PySPEDAS is an implementation of the SPEDAS framework for Python.
-
-The Space Physics Environment Data Analysis Software ([SPEDAS](http://spedas.org/wiki)) framework is written in IDL and contains data loading, data analysis and data plotting tools for various scientific missions (NASA, NOAA, etc.) and ground magnetometers.
-
-Please see our documentation at:
-
-https://pyspedas.readthedocs.io/
-
+The Python-based Space Physics Environment Data Analysis Software (PySPEDAS) framework supports multi-mission, multi-instrument retrieval, analysis, and visualization of heliophysics time series data.
## Projects Supported
- [Advanced Composition Explorer (ACE)](https://pyspedas.readthedocs.io/en/latest/ace.html)
+- [Akebono](https://pyspedas.readthedocs.io/en/latest/akebono.html)
- [Arase (ERG)](https://pyspedas.readthedocs.io/en/latest/erg.html)
- [Cluster](https://pyspedas.readthedocs.io/en/latest/cluster.html)
- [Colorado Student Space Weather Experiment (CSSWE)](https://pyspedas.readthedocs.io/en/latest/csswe.html)
+- [Communications/Navigation Outage Forecasting System (C/NOFS)](https://pyspedas.readthedocs.io/en/latest/cnofs.html)
- [Deep Space Climate Observatory (DSCOVR)](https://pyspedas.readthedocs.io/en/latest/dscovr.html)
+- [Dynamics Explorer 2 (DE2)](https://pyspedas.readthedocs.io/en/latest/de2.html)
- [Equator-S](https://pyspedas.readthedocs.io/en/latest/equator-s.html)
- [Fast Auroral Snapshot Explorer (FAST)](https://pyspedas.readthedocs.io/en/latest/fast.html)
- [Geotail](https://pyspedas.readthedocs.io/en/latest/geotail.html)
- [Geostationary Operational Environmental Satellite (GOES)](https://pyspedas.readthedocs.io/en/latest/goes.html)
- [Imager for Magnetopause-to-Aurora Global Exploration (IMAGE)](https://pyspedas.readthedocs.io/en/latest/image.html)
- [Kyoto Dst Index](https://pyspedas.readthedocs.io/en/latest/kyoto.html)
+- [LANL](https://pyspedas.readthedocs.io/en/latest/lanl.html)
- [Mars Atmosphere and Volatile Evolution (MAVEN)](https://pyspedas.readthedocs.io/en/latest/maven.html)
- [Magnetic Induction Coil Array (MICA)](https://pyspedas.readthedocs.io/en/latest/mica.html)
- [Magnetospheric Multiscale (MMS)](https://pyspedas.readthedocs.io/en/latest/mms.html)
@@ -35,8 +31,10 @@ https://pyspedas.readthedocs.io/
- [Polar Orbiting Environmental Satellites (POES)](https://pyspedas.readthedocs.io/en/latest/poes.html)
- [Polar](https://pyspedas.readthedocs.io/en/latest/polar.html)
- [Parker Solar Probe (PSP)](https://pyspedas.readthedocs.io/en/latest/psp.html)
+- [Solar & Heliospheric Observatory (SOHO)](https://pyspedas.readthedocs.io/en/latest/soho.html)
- [Solar Orbiter (SOLO)](https://pyspedas.readthedocs.io/en/latest/solo.html)
- [Solar Terrestrial Relations Observatory (STEREO)](https://pyspedas.readthedocs.io/en/latest/stereo.html)
+- [Space Technology 5 (ST5)](https://pyspedas.readthedocs.io/en/latest/st5.html)
- [Spherical Elementary Currents (SECS)](https://github.com/spedas/pyspedas/blob/master/pyspedas/secs/README.md)
- [Swarm](https://github.com/spedas/pyspedas/blob/master/pyspedas/swarm/README.md)
- [Time History of Events and Macroscale Interactions during Substorms (THEMIS)](https://pyspedas.readthedocs.io/en/latest/themis.html)
@@ -47,13 +45,13 @@ https://pyspedas.readthedocs.io/
## Requirements
-Python 3.7+ is required.
+Python 3.8+ is required.
We recommend [Anaconda](https://www.continuum.io/downloads/) which comes with a suite of packages useful for scientific data analysis. Step-by-step instructions for installing Anaconda can be found at: [Windows](https://docs.anaconda.com/anaconda/install/windows/), [macOS](https://docs.anaconda.com/anaconda/install/mac-os/), [Linux](https://docs.anaconda.com/anaconda/install/linux/)
## Installation
-### Setup your Virtual Environment
+### Virtual Environment
To avoid potential dependency issues with other Python packages, we suggest creating a virtual environment for PySPEDAS; you can create a virtual environment in your terminal with:
```bash
@@ -158,6 +156,54 @@ stereo_files = pyspedas.stereo.mag(trange=['2013-11-1', '2013-11-6'], downloadon
- `no_update`: if set, only load the data from the local cache
- `notplot`: if set, load the variables into dictionaries containing numpy arrays (instead of creating the tplot variables)
+## Examples
+Please see the following notebooks for examples of using PySPEDAS:
+### PyTplot Basics
+- [Introduction to PyTplot](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/Introduction_to_PyTplot.ipynb)
+
+### Loading Data
+- [MMS examples](https://github.com/spedas/mms-examples/tree/master/basic)
+- [THEMIS examples](https://github.com/spedas/themis-examples/tree/main/basic)
+- [Load data from HAPI servers](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PySPEDAS_loading_data_from_HAPI_servers.ipynb)
+- [Exploring the Heliosphere with Python](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/Exploring_the_Heliosphere_with_Python.ipynb)
+
+### Plotting
+- [Annotations](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_annotations.ipynb)
+- [Range options](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_range_options.ipynb)
+- [Spectrogram options](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_spectrogram_options.ipynb)
+- [Legend options](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_legend_options.ipynb)
+- [Markers and symbols](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_markers_and_symbols.ipynb)
+- [Error bars](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_error_bars.ipynb)
+- [Pseudo variables](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_pseudo_variables.ipynb)
+- [Highlight intervals and vertical bars](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/PyTplot_highlight_intervals_and_vertical_bars.ipynb)
+
+Additional examples of loading and plotting data can be found in the documentation for the project you're interested in ([PySPEDAS projects](https://pyspedas.readthedocs.io/en/latest/projects.html)), as well as the project's README file.
+
+### Dates and Times
+- [Working with dates and times](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/Working_with_dates_and_times_with_PySPEDAS_PyTplot.ipynb)
+
+### Coordinate Transformations
+- [Coordinate transformations](https://github.com/spedas/pyspedas_examples/blob/master/pyspedas_examples/notebooks/Coordinate_transformations_with_OMNI_data.ipynb)
+- [Boundary normal (LMN) coordinates](https://github.com/spedas/mms-examples/blob/master/advanced/MMS_LMN_coordinate_transformation.ipynb)
+- [Quaternion transformations with SpacePy](https://github.com/spedas/mms-examples/blob/master/basic/MMS_quaternion_coordinate_transformations.ipynb)
+
+### Analysis
+- [Plasma calculations with PlasmaPy](https://github.com/spedas/mms-examples/blob/master/advanced/Plasma%20calculations%20with%20PlasmaPy.ipynb)
+- [Poynting flux with MMS data](https://github.com/spedas/mms-examples/blob/master/advanced/Poynting_flux_with_MMS_data.ipynb)
+- [Plasma beta with MMS data](https://github.com/spedas/mms-examples/blob/master/basic/Plasma%20Beta%20with%20FGM%20and%20FPI%20data.ipynb) (note: the PlasmaPy notebook above shows a much easier method)
+- [Curlometer calculations](https://github.com/spedas/mms-examples/blob/master/basic/Curlometer%20Technique.ipynb)
+- [Neutral sheet models](https://github.com/spedas/mms-examples/blob/master/advanced/MMS_neutral_sheet_models.ipynb)
+- [Wave polarization calculations](https://github.com/spedas/mms-examples/blob/master/advanced/Wave_polarization_using_SCM_data.ipynb)
+- [Dynamic power spectra calculations](https://github.com/spedas/mms-examples/blob/master/basic/Search-coil%20Magnetometer%20(SCM).ipynb)
+- [2D slices of MMS distribution functions](https://github.com/spedas/mms-examples/blob/master/advanced/Generate_2D_slices_of_FPI_and_HPCA_data.ipynb)
+- [Generating spectrograms and moments from MMS distribution functions](https://github.com/spedas/mms-examples/blob/master/advanced/Generate%20spectrograms%20and%20moments%20with%20mms_part_getspec.ipynb)
+
+
+## Documentation
+For more information, please see our HTML documentation at:
+
+https://pyspedas.readthedocs.io/
+
## Getting Help
To find the options supported, call `help` on the instrument function you're interested in:
```python
@@ -169,6 +215,11 @@ You can ask questions by creating an issue or by joining the [SPEDAS mailing lis
## Contributing
We welcome contributions to PySPEDAS; to learn how you can contribute, please see our [Contributing Guide](https://github.com/spedas/pyspedas/blob/master/CONTRIBUTING.md)
+## Plug-in Development
+An introduction to PySPEDAS plug-in development can be found here:
+
+[Introduction to PySPEDAS plug-in development](https://github.com/spedas/pyspedas/tree/master/docs/pyspedas_plugin_development.pdf)
+
## Code of Conduct
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. To learn more, please see our [Code of Conduct](https://github.com/spedas/pyspedas/blob/master/CODE_OF_CONDUCT.md).
diff --git a/docs/pyspedas_plugin_development.pdf b/docs/pyspedas_plugin_development.pdf
new file mode 100644
index 00000000..4287da47
Binary files /dev/null and b/docs/pyspedas_plugin_development.pdf differ
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 9416da80..915a8e90 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -7,4 +7,6 @@ netCDF4
pywavelets
astropy
https://github.com/MAVENSDC/PyTplot/archive/matplotlib-backend.zip
-geopack>=1.0.9
\ No newline at end of file
+geopack>=1.0.9
+viresclient
+hapiclient
\ No newline at end of file
diff --git a/docs/source/_static/cnofs_cindi.png b/docs/source/_static/cnofs_cindi.png
new file mode 100644
index 00000000..7f1189ec
Binary files /dev/null and b/docs/source/_static/cnofs_cindi.png differ
diff --git a/docs/source/_static/cnofs_plp.png b/docs/source/_static/cnofs_plp.png
new file mode 100644
index 00000000..db4bd0b6
Binary files /dev/null and b/docs/source/_static/cnofs_plp.png differ
diff --git a/docs/source/_static/cnofs_vefi.png b/docs/source/_static/cnofs_vefi.png
new file mode 100644
index 00000000..6c87570d
Binary files /dev/null and b/docs/source/_static/cnofs_vefi.png differ
diff --git a/docs/source/_static/rbsp_rbspice.png b/docs/source/_static/rbsp_rbspice.png
new file mode 100644
index 00000000..5598542a
Binary files /dev/null and b/docs/source/_static/rbsp_rbspice.png differ
diff --git a/docs/source/akebono.rst b/docs/source/akebono.rst
new file mode 100644
index 00000000..f0d66407
--- /dev/null
+++ b/docs/source/akebono.rst
@@ -0,0 +1,65 @@
+Akebono
+========================================================================
+The routines in this module can be used to load data from the Akebono mission.
+
+
+Plasma Waves and Sounder experiment (PWS)
+----------------------------------------------------------
+.. autofunction:: pyspedas.akebono.pws
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ pws_vars = pyspedas.akebono.pws(trange=['2012-10-01', '2012-10-02'])
+ tplot('akb_pws_RX1')
+
+.. image:: _static/akebono_pws.png
+ :align: center
+ :class: imgborder
+
+
+Radiation Monitor (RDM)
+----------------------------------------------------------
+.. autofunction:: pyspedas.akebono.rdm
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ rdm_vars = pyspedas.akebono.rdm(trange=['2012-10-01', '2012-10-02'])
+ tplot('akb_rdm_FEIO')
+
+.. image:: _static/akebono_rdm.png
+ :align: center
+ :class: imgborder
+
+
+Orbit data (orb)
+----------------------------------------------------------
+.. autofunction:: pyspedas.akebono.orb
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ orb_vars = pyspedas.akebono.orb(trange=['2012-10-01', '2012-10-02'])
+ tplot(['akb_orb_geo', 'akb_orb_MLT'])
+
+.. image:: _static/akebono_orb.png
+ :align: center
+ :class: imgborder
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/source/cnofs.rst b/docs/source/cnofs.rst
new file mode 100644
index 00000000..de6c77d5
--- /dev/null
+++ b/docs/source/cnofs.rst
@@ -0,0 +1,64 @@
+Communications/Navigation Outage Forecasting System (C/NOFS)
+========================================================================
+The routines in this module can be used to load data from the Communications/Navigation Outage Forecasting System (C/NOFS) mission.
+
+
+Coupled Ion-Neutral Dynamics Investigation (CINDI)
+----------------------------------------------------------
+.. autofunction:: pyspedas.cnofs.cindi
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ cindi_vars = pyspedas.cnofs.cindi(trange=['2013-11-5', '2013-11-6'])
+ tplot(['ionVelocityX', 'ionVelocityY', 'ionVelocityZ'])
+
+.. image:: _static/cnofs_cindi.png
+ :align: center
+ :class: imgborder
+
+
+Vector Electric Field Instrument (VEFI)
+----------------------------------------------------------
+.. autofunction:: pyspedas.cnofs.vefi
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ vefi_vars = pyspedas.cnofs.vefi(trange=['2010-11-5/12:00', '2010-11-5/13:00'])
+ tplot(['E_meridional', 'E_zonal'])
+
+.. image:: _static/cnofs_vefi.png
+ :align: center
+ :class: imgborder
+
+
+Planar Langmuir Probe (PLP)
+----------------------------------------------------------
+.. autofunction:: pyspedas.cnofs.plp
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ plp_vars = pyspedas.cnofs.plp(trange=['2010-11-5', '2010-11-6'])
+ tplot('Ni')
+
+.. image:: _static/cnofs_plp.png
+ :align: center
+ :class: imgborder
+
+
+
+
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 4ba13722..ac9da23f 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -20,9 +20,9 @@
# -- Project information -----------------------------------------------------
-project = 'pySPEDAS'
+project = 'PySPEDAS'
copyright = '2018-2022, UC Regents, unless otherwise indicated'
-author = 'The pySPEDAS Community'
+author = 'The PySPEDAS Community'
# The full version, including alpha/beta/rc tags
release = '1.3'
diff --git a/docs/source/de2.rst b/docs/source/de2.rst
new file mode 100644
index 00000000..a45835e0
--- /dev/null
+++ b/docs/source/de2.rst
@@ -0,0 +1,160 @@
+Dynamics Explorer 2 (DE2)
+========================================================================
+The routines in this module can be used to load data from the Dynamics Explorer 2 (DE2) mission.
+
+
+Magnetometer (MAG)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.mag
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ mag_vars = pyspedas.de2.mag(trange=['1983-02-16', '1983-02-17'])
+ tplot(['bx', 'by', 'bz'])
+
+.. image:: _static/de2_mag.png
+ :align: center
+ :class: imgborder
+
+
+Neutral Atmosphere Composition Spectrometer (NACS)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.nacs
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ nacs_vars = pyspedas.de2.nacs(trange=['1983-02-16', '1983-02-17'])
+ tplot(['O_density', 'N_density'])
+
+.. image:: _static/de2_nacs.png
+ :align: center
+ :class: imgborder
+
+
+Retarding Potential Analyzer (RPA)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.rpa
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ rpa_vars = pyspedas.de2.rpa(trange=['1983-02-16', '1983-02-17'])
+ tplot(['ionDensity', 'ionTemperature'])
+
+.. image:: _static/de2_rpa.png
+ :align: center
+ :class: imgborder
+
+
+Fabry-Pérot Interferometer (FPI)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.fpi
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ fpi_vars = pyspedas.de2.fpi(trange=['1983-02-16', '1983-02-17'])
+ tplot('TnF')
+
+.. image:: _static/de2_fpi.png
+ :align: center
+ :class: imgborder
+
+
+Ion Drift Meter (IDM)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.idm
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ idm_vars = pyspedas.de2.idm(trange=['1983-02-16', '1983-02-17'])
+ tplot(['ionVelocityZ', 'ionVelocityY'])
+
+.. image:: _static/de2_idm.png
+ :align: center
+ :class: imgborder
+
+
+Wind and Temperature Spectrometer (WATS)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.wats
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ wats_vars = pyspedas.de2.wats(trange=['1983-02-16', '1983-02-17'])
+ tplot(['density', 'Tn'])
+
+.. image:: _static/de2_wats.png
+ :align: center
+ :class: imgborder
+
+
+Vector Electric Field Instrument (VEFI)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.vefi
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ vefi_vars = pyspedas.de2.vefi(trange=['1983-02-16', '1983-02-17'])
+ tplot(['spectA', 'spectB', 'spectC'])
+
+.. image:: _static/de2_vefi.png
+ :align: center
+ :class: imgborder
+
+
+Langmuir Probe Instrument (LANG)
+----------------------------------------------------------
+.. autofunction:: pyspedas.de2.lang
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ lang_vars = pyspedas.de2.lang(trange=['1983-02-16', '1983-02-17'])
+ tplot(['plasmaDensity', 'electronTemp'])
+
+.. image:: _static/de2_lang.png
+ :align: center
+ :class: imgborder
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 8b8c7353..36e6d95b 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -4,15 +4,15 @@
contain the root `toctree` directive.
:tocdepth: 3
-pySPEDAS Documentation
+PySPEDAS Documentation
====================================
|build| |coverage| |version| |quality| |status| |license|
-pySPEDAS is an implementation of the Space Physics Environment Data Analysis Software (SPEDAS) framework in Python.
+PySPEDAS is an implementation of the Space Physics Environment Data Analysis Software (SPEDAS) framework in Python.
The SPEDAS framework is written in IDL and contains data loading, data analysis and data plotting tools for various scientific missions (NASA, NOAA, etc.) and ground magnetometers.
-pySPEDAS and `pyTplot `_ make creating multi-mission, multi-instrument figures simple, e.g., to create a figure showing magnetometer data from `Solar Orbiter `_, `Parker Solar Probe `_, `MMS `_, and `THEMIS `_,
+PySPEDAS and `pyTplot `_ make creating multi-mission, multi-instrument figures simple, e.g., to create a figure showing magnetometer data from `Solar Orbiter `_, `Parker Solar Probe `_, `MMS `_, and `THEMIS `_,
.. code-block:: python
diff --git a/docs/source/lanl.rst b/docs/source/lanl.rst
new file mode 100644
index 00000000..c7f4004f
--- /dev/null
+++ b/docs/source/lanl.rst
@@ -0,0 +1,46 @@
+LANL
+========================================================================
+The routines in this module can be used to load data from the LANL plasma analyzers.
+
+
+Magnetospheric Plasma Analyzer (MPA)
+----------------------------------------------------------
+.. autofunction:: pyspedas.lanl.mpa
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ mpa_vars = pyspedas.lanl.mpa(trange=['2004-10-31', '2004-11-01'])
+ tplot(['dens_lop', 'vel_lop'])
+
+.. image:: _static/lanl_mpa.png
+ :align: center
+ :class: imgborder
+
+
+Synchronous Orbit Particle Analyzer (SPA)
+----------------------------------------------------------
+.. autofunction:: pyspedas.lanl.spa
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ spa_vars = pyspedas.lanl.spa(trange=['2004-10-31', '2004-11-01'])
+ tplot(['spa_p_temp', 'spa_e_temp'])
+
+.. image:: _static/lanl_spa.png
+ :align: center
+ :class: imgborder
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/source/projects.rst b/docs/source/projects.rst
index bf5fe337..20396f9a 100644
--- a/docs/source/projects.rst
+++ b/docs/source/projects.rst
@@ -5,9 +5,12 @@ Load Routines
:maxdepth: 2
ace
+ akebono
erg
cluster
+ cnofs
csswe
+ de2
dscovr
equator-s
fast
@@ -15,6 +18,7 @@ Load Routines
goes
image
kyoto
+ lanl
maven
mica
mms
@@ -22,7 +26,9 @@ Load Routines
poes
polar
psp
+ soho
solo
+ st5
stereo
themis
twins
diff --git a/docs/source/rbsp.rst b/docs/source/rbsp.rst
index 79e8c2c3..f9b54376 100644
--- a/docs/source/rbsp.rst
+++ b/docs/source/rbsp.rst
@@ -56,8 +56,13 @@ Example
import pyspedas
from pytplot import tplot
- rbspice_vars = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='tofxeh', level='l3')
- tplot('Alpha')
+ rbspice_vars = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='TOFxEH', level='l3')
+ tplot('rbspa_rbspice_l3_TOFxEH_proton_omni_spin')
+ # calculate the pitch angle distributions
+ from pyspedas.rbsp.rbspice_lib.rbsp_rbspice_pad import rbsp_rbspice_pad
+ rbsp_rbspice_pad(probe='a', datatype='TOFxEH', level='l3')
+ tplot(['rbspa_rbspice_l3_TOFxEH_proton_omni_spin',
+ 'rbspa_rbspice_l3_TOFxEH_proton_omni_0-1000keV_pad_spin'])
.. image:: _static/rbsp_rbspice.png
:align: center
@@ -65,7 +70,6 @@ Example
-
Energetic Particle, Composition, and Thermal Plasma Suite (ECT) - MagEIS
----------------------------------------------------------
.. autofunction:: pyspedas.rbsp.mageis
diff --git a/docs/source/soho.rst b/docs/source/soho.rst
new file mode 100644
index 00000000..1df92070
--- /dev/null
+++ b/docs/source/soho.rst
@@ -0,0 +1,84 @@
+Solar & Heliospheric Observatory (SOHO)
+========================================================================
+The routines in this module can be used to load data from the Solar & Heliospheric Observatory (SOHO) mission.
+
+
+Charge, Element, and Isotope Analysis System (CELIAS)
+----------------------------------------------------------
+.. autofunction:: pyspedas.soho.celias
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ celias_vars = pyspedas.soho.celias(trange=['2006-06-01', '2006-06-02'])
+ tplot(['V_p', 'N_p'])
+
+.. image:: _static/soho_celias.png
+ :align: center
+ :class: imgborder
+
+
+Comprehensive Suprathermal and Energetic Particle Analyzer (COSTEP)
+----------------------------------------------------------
+.. autofunction:: pyspedas.soho.costep
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ costep_vars = pyspedas.soho.costep(trange=['2006-06-01', '2006-06-02'])
+ tplot(['P_int', 'He_int'])
+
+.. image:: _static/soho_costep.png
+ :align: center
+ :class: imgborder
+
+
+Energetic and Relativistic Nuclei and Electron experiment (ERNE)
+----------------------------------------------------------
+.. autofunction:: pyspedas.soho.erne
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ erne_vars = pyspedas.soho.erne(trange=['2006-06-01', '2006-06-02'])
+ tplot('PH')
+
+.. image:: _static/soho_erne.png
+ :align: center
+ :class: imgborder
+
+
+Orbit (ephemeris and attitude) data (ORBIT)
+----------------------------------------------------------
+.. autofunction:: pyspedas.soho.orbit
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ orbit_vars = pyspedas.soho.orbit(trange=['2006-06-01', '2006-06-02'])
+ tplot(['GSE_POS', 'GSE_VEL'])
+
+.. image:: _static/soho_orbit.png
+ :align: center
+ :class: imgborder
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/source/st5.rst b/docs/source/st5.rst
new file mode 100644
index 00000000..7033b5ac
--- /dev/null
+++ b/docs/source/st5.rst
@@ -0,0 +1,27 @@
+Space Technology 5 (ST5)
+========================================================================
+The routines in this module can be used to load data from the Space Technology 5 (ST5) mission.
+
+
+Magnetometer (MAG)
+----------------------------------------------------------
+.. autofunction:: pyspedas.st5.mag
+
+Example
+^^^^^^^^^
+
+.. code-block:: python
+
+ import pyspedas
+ from pytplot import tplot
+ st5_vars = pyspedas.st5.mag(trange=['2006-06-01', '2006-06-02'])
+ tplot(['B_SM', 'SC_POS_SM'])
+
+.. image:: _static/st5_mag.png
+ :align: center
+ :class: imgborder
+
+
+
+
+
\ No newline at end of file
diff --git a/pyspedas/__init__.py b/pyspedas/__init__.py
index a8188891..3360b1c9 100644
--- a/pyspedas/__init__.py
+++ b/pyspedas/__init__.py
@@ -1,25 +1,25 @@
from .version import version
-from .utilities.data_exists import data_exists
-from .utilities.tnames import tnames
-from .utilities.time_string import time_string, time_datetime
-from .utilities.time_double import time_float, time_double
+from pytplot import data_exists
+from pytplot import tnames
+from pytplot import time_string, time_datetime
+from pytplot import time_float, time_double
from .utilities.tcopy import tcopy
-from .utilities.tkm2re import tkm2re
+from pytplot import tkm2re
from .analysis.avg_data import avg_data
-from .analysis.clean_spikes import clean_spikes
+from pytplot import clean_spikes
from .analysis.deriv_data import deriv_data
from .analysis.dpwrspc import dpwrspc
-from .analysis.subtract_average import subtract_average
-from .analysis.subtract_median import subtract_median
-from .analysis.time_clip import time_clip
+from pytplot import subtract_average
+from pytplot import subtract_median
+from pytplot import time_clip
from .analysis.tdeflag import tdeflag
from .analysis.tdpwrspc import tdpwrspc
from .analysis.tinterpol import tinterpol
-from .analysis.tnormalize import tnormalize
-from .analysis.tdotp import tdotp
-from .analysis.tcrossp import tcrossp
-from .analysis.tsmooth import tsmooth
+from pytplot import tnormalize
+from pytplot import tdotp
+from pytplot import tcrossp
+from pytplot import tsmooth
from .analysis.yclip import yclip
from .analysis.twavpol import twavpol
from pytplot import cdf_to_tplot
@@ -27,9 +27,12 @@
from .cotrans.cotrans import cotrans
from .cotrans.cotrans_get_coord import cotrans_get_coord
from .cotrans.cotrans_set_coord import cotrans_set_coord
+from .cotrans.tvector_rotate import tvector_rotate
-from .mms import mms_load_mec, mms_load_fgm, mms_load_scm, mms_load_edi, mms_load_edp, mms_load_eis, mms_load_feeps, \
- mms_load_hpca, mms_load_fpi, mms_load_aspoc, mms_load_dsp, mms_load_fsm, mms_load_state
+from .mms import mms_load_mec, mms_load_fgm, mms_load_scm, mms_load_edi, \
+ mms_load_edp, mms_load_eis, mms_load_feeps, \
+ mms_load_hpca, mms_load_fpi, mms_load_aspoc, \
+ mms_load_dsp, mms_load_fsm, mms_load_state
from .mms.feeps.mms_feeps_pad import mms_feeps_pad
from .mms.feeps.mms_feeps_gpd import mms_feeps_gpd
from .mms.eis.mms_eis_pad import mms_eis_pad
@@ -37,6 +40,7 @@
from .mms.hpca.mms_hpca_spin_sum import mms_hpca_spin_sum
from .maven import maven_load
+from .sosmag.load import sosmag_load
from . import erg
from . import ulysses
@@ -63,3 +67,52 @@
from . import secs
from . import kyoto
from . import swarm
+from . import vires
+from . import cnofs
+from . import lanl
+from . import st5
+from . import de2
+from . import akebono
+from . import soho
+from . import barrel
+
+# set up logging/console output
+import logging
+from os import environ
+
+logging_level = environ.get('PYTPLOT_LOGGING_LEVEL')
+logging_format = environ.get('PYTPLOT_LOGGING_FORMAT')
+logging_date_fmt = environ.get('PYTPLOT_LOGGING_DATE_FORMAT')
+
+if logging_format is None:
+ logging_format = '%(asctime)s: %(message)s'
+
+if logging_date_fmt is None:
+ logging_date_fmt = '%d-%b-%y %H:%M:%S'
+
+if logging_level is None:
+ logging_level = logging.INFO
+else:
+ logging_level = logging_level.lower()
+ if logging_level == 'debug':
+ logging_level = logging.DEBUG
+ elif logging_level == 'info':
+ logging_level = logging.INFO
+ elif logging_level == 'warn' or logging_level == 'warning':
+ logging_level = logging.WARNING
+ elif logging_level == 'error':
+ logging_level = logging.ERROR
+ elif logging_level == 'critical':
+ logging_level = logging.CRITICAL
+
+logging.captureWarnings(True)
+
+# basicConfig here doesn't work if it has previously been called
+logging.basicConfig(format=logging_format, datefmt=logging_date_fmt, level=logging_level)
+
+# manually set the logger options from the defaults/environment variables
+logger = logging.getLogger()
+logger_handler = logger.handlers[0] # should exist since basicConfig has been called
+logger_fmt = logging.Formatter(logging_format, logging_date_fmt)
+logger_handler.setFormatter(logger_fmt)
+logger.setLevel(logging_level)
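+
+# Illustrative usage (assumption, not part of the package): set the variable
+# before importing pyspedas to control console verbosity, e.g.
+#   PYTPLOT_LOGGING_LEVEL=error python -m pyspedas.mms.tests.load_routine_tests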
diff --git a/pyspedas/ace/__init__.py b/pyspedas/ace/__init__.py
index 6c4b1f86..7a590e83 100644
--- a/pyspedas/ace/__init__.py
+++ b/pyspedas/ace/__init__.py
@@ -1,6 +1,7 @@
-
from .load import load
from pytplot import options
+from pyspedas.utilities.datasets import find_datasets
+
def mfi(trange=['2018-11-5', '2018-11-6'],
datatype='h3',
@@ -82,6 +83,7 @@ def mfi(trange=['2018-11-5', '2018-11-6'],
return tvars
+
def swe(trange=['2018-11-5', '2018-11-6'],
datatype='h0',
suffix='',
@@ -146,6 +148,7 @@ def swe(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='swe', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def epam(trange=['2018-11-5', '2018-11-6'],
datatype='k0',
suffix='',
@@ -211,6 +214,7 @@ def epam(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='epm', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def cris(trange=['2018-11-5', '2018-11-6'],
datatype='h2',
suffix='',
@@ -273,6 +277,7 @@ def cris(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='cris', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def sis(trange=['2018-11-5', '2018-11-6'],
datatype='k0',
suffix='',
@@ -460,6 +465,7 @@ def sepica(trange=['2004-11-5', '2004-11-6'],
"""
return load(instrument='sep', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def swics(trange=['2018-11-5', '2018-11-6'],
datatype='sw2_h3',
suffix='',
@@ -525,3 +531,7 @@ def swics(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='swics', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='ACE', instrument=instrument, label=label)
diff --git a/pyspedas/ace/load.py b/pyspedas/ace/load.py
index cb946046..ddd3259e 100644
--- a/pyspedas/ace/load.py
+++ b/pyspedas/ace/load.py
@@ -3,7 +3,7 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/ace/tests/tests.py b/pyspedas/ace/tests/tests.py
index 116b3ef9..43f4548f 100644
--- a/pyspedas/ace/tests/tests.py
+++ b/pyspedas/ace/tests/tests.py
@@ -1,14 +1,21 @@
-
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_load_mfi_data(self):
- mfi_vars = pyspedas.ace.mfi(trange=['2018-11-5', '2018-11-6'])
+ mfi_vars = pyspedas.ace.mfi(trange=['2018-11-5', '2018-11-6'], time_clip=True)
self.assertTrue(data_exists('Magnitude'))
+ def test_load_mfi_notplot(self):
+ mfi_vars = pyspedas.ace.mfi(trange=['2018-11-5', '2018-11-6'], notplot=True)
+ self.assertTrue(isinstance(mfi_vars, dict))
+
+ def test_load_mfi_downloadonly(self):
+ mfi_vars = pyspedas.ace.mfi(trange=['2018-11-5', '2018-11-6'], downloadonly=True)
+ self.assertTrue(isinstance(mfi_vars, list))
+
def test_load_swe_data(self):
swe_vars = pyspedas.ace.swe()
self.assertTrue(data_exists('Np'))
@@ -44,5 +51,9 @@ def test_load_swi_data(self):
swi_vars = pyspedas.ace.swics()
self.assertTrue(data_exists('vHe2'))
+ def test_data_dir(self):
+ self.assertTrue(pyspedas.ace.config.CONFIG['local_data_dir'] == 'ace_data/')
+
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/akebono/README.md b/pyspedas/akebono/README.md
new file mode 100644
index 00000000..419facd7
--- /dev/null
+++ b/pyspedas/akebono/README.md
@@ -0,0 +1,45 @@
+
+## Akebono
+The routines in this module can be used to load data from the Akebono mission.
+
+### Instruments
+- Plasma Waves and Sounder experiment (PWS)
+- Radiation Monitor (RDM)
+- Orbit data (orb)
+
+### Examples
+Get started by importing pyspedas and tplot; these are required to load and plot the data:
+
+```python
+import pyspedas
+from pytplot import tplot
+```
+
+#### Plasma Waves and Sounder experiment (PWS)
+
+```python
+pws_vars = pyspedas.akebono.pws(trange=['2012-10-01', '2012-10-02'])
+
+tplot(['akb_pws_RX1', 'akb_pws_RX2'])
+```
+
+
+#### Radiation Monitor (RDM)
+
+```python
+rdm_vars = pyspedas.akebono.rdm(trange=['2012-10-01', '2012-10-02'])
+
+tplot('akb_rdm_FEIO')
+```
+
+
+#### Orbit data (orb)
+
+```python
+orb_vars = pyspedas.akebono.orb(trange=['2012-10-01', '2012-10-02'])
+
+tplot(['akb_orb_geo', 'akb_orb_MLT'])
+```
+
+
+
\ No newline at end of file
diff --git a/pyspedas/akebono/__init__.py b/pyspedas/akebono/__init__.py
new file mode 100644
index 00000000..fa0279e7
--- /dev/null
+++ b/pyspedas/akebono/__init__.py
@@ -0,0 +1,344 @@
+from .load import load
+import numpy as np
+import pandas as pd
+from pytplot import store_data, options
+from pyspedas import time_double
+from pyspedas.cotrans.xyz_to_polar import xyz_to_polar
+
+
+def pws(trange=['2012-10-01', '2012-10-02'],
+ datatype='ne',
+ level='h1',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Plasma Waves and Sounder experiment (PWS)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'ne', 'npw-ds', 'npw-py', 'spw'
+
+ level: str
+ Data level; options: 'h1' (default: h1)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+ tvars = load(instrument='pws', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return pws_postprocessing(tvars)
+
+
+def pws_postprocessing(variables):
+ """
+ Placeholder for PWS post-processing
+ """
+ return variables
+
+
+def rdm(trange=['2012-10-01', '2012-10-02'],
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Radiation Monitor (RDM)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+ files = load(instrument='rdm', trange=trange, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if files is None or notplot or downloadonly:
+ return files
+
+ return rdm_postprocessing(files)
+
+
+def rdm_postprocessing(files):
+ """
+ Load the RDM ASCII files into tplot variables
+ """
+ data = load_csv_file(files)
+ values = data.to_numpy()
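+ # columns 0 and 1 hold the date and time strings; the remaining columns are numeric fields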
+ unix_times = time_double([ymd + '/' + hms for ymd, hms in zip(values[:, 0], values[:, 1])])
+
+ L = np.float64(values[:, 2])
+ INV = np.float64(values[:, 3])
+ FMLAT = np.float64(values[:, 4])
+ MLAT = np.float64(values[:, 5])
+ MLT = np.float64(values[:, 6])
+ ALT = np.float64(values[:, 7])
+ GLAT = np.float64(values[:, 8])
+ GLON = np.float64(values[:, 9])
+ RDM_E3 = np.float64(values[:, 10])
+ Energy = np.zeros(len(RDM_E3))
+ Energy[:] = 2.5
+
+ prefix_project = 'akb_'
+ prefix_descriptor = 'rdm_'
+ prefix = prefix_project + prefix_descriptor
+
+ store_data(prefix_project+'L', data={'x': unix_times, 'y': L})
+ store_data(prefix_project+'INV', data={'x': unix_times, 'y': INV})
+ store_data(prefix_project+'FMLAT', data={'x': unix_times, 'y': FMLAT})
+ store_data(prefix_project+'MLAT', data={'x': unix_times, 'y': MLAT})
+ store_data(prefix_project+'MLT', data={'x': unix_times, 'y': MLT})
+ store_data(prefix_project+'ALT', data={'x': unix_times, 'y': ALT})
+ store_data(prefix_project+'GLAT', data={'x': unix_times, 'y': GLAT})
+ store_data(prefix_project+'GLON', data={'x': unix_times, 'y': GLON})
+ store_data(prefix+'FEIO', data={'x': unix_times, 'y': RDM_E3})
+ store_data(prefix+'FEIO_Energy', data={'x': unix_times, 'y': Energy})
+
+ options(prefix+'FEIO', 'spec', True)
+
+ options(prefix_project+'L', 'ytitle', 'L-value')
+ options(prefix_project+'INV', 'ytitle', 'Invariant Latitude [deg]')
+ options(prefix_project+'FMLAT', 'ytitle', 'Footprint Latitude [deg]')
+ options(prefix_project+'MLAT', 'ytitle', 'Magnetic Latitude [deg]')
+ options(prefix_project+'MLT', 'ytitle', 'Magnetic Local Time [hour]')
+ options(prefix_project+'ALT', 'ytitle', 'Altitude [km]')
+ options(prefix_project+'GLAT', 'ytitle', 'Geographic Latitude [deg]')
+ options(prefix_project+'GLON', 'ytitle', 'Geographic Longitude [deg]')
+ options(prefix+'FEIO', 'ytitle', 'Omni-directional Integral Electron Flux')
+ options(prefix+'FEIO', 'ysubtitle', '[/cm^2 sec str]')
+ options(prefix+'FEIO_Energy', 'ytitle', 'Electron energy [MeV]')
+
+ return [prefix_project+'L',
+ prefix_project+'INV',
+ prefix_project+'FMLAT',
+ prefix_project+'MLAT',
+ prefix_project+'MLT',
+ prefix_project+'ALT',
+ prefix_project+'GLAT',
+ prefix_project+'GLON',
+ prefix+'FEIO',
+ prefix+'FEIO_Energy']
+
+
+def orb(trange=['2012-10-01', '2012-10-02'],
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Akebono orbit data (orb)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+ files = load(instrument='orb', trange=trange, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if files is None or notplot or downloadonly:
+ return files
+
+ return orb_postprocessing(files)
+
+
+def orb_postprocessing(files):
+ """
+ Load the orbit CSV files and create the tplot variables
+ """
+ prefix_project = 'akb_'
+ prefix_descriptor = 'orb_'
+ prefix = prefix_project + prefix_descriptor
+
+ cols = ['pass','ut', 'ksc_azm', 'ksc_elv', 'ksc_dis', 'ksc_ang', 'syo_azm', 'syo_elv', 'syo_dis', 'syo_ang',
+ 'pra_azm', 'pra_elv', 'pra_dis', 'pra_ang', 'esr_azm', 'esr_elv', 'esr_dis', 'esr_ang', 'gclat','gclon',
+ 'inv', 'fmlat', 'mlat', 'mlt', 'bmdl_x', 'bmdl_y', 'bmdl_z', 'xxlon_sc', 'xxlat_sc', 'aheight','lsun',
+ 's_direc_x', 's_direc_y', 's_direc_z', 'sc_pos_x', 'sc_pos_y', 'sc_pos_z', 'sc_vel_x', 'sc_vel_y', 'sc_vel_z']
+
+ data = load_csv_file(files, cols=cols)
+ values = data.to_numpy()
+ unix_times = time_double([date[2:4] + '-' + date[4:6] + '-' + date[0:2] + '/' + date[6:8] + ':' + date[8:10] + ':' + date[10:12] for date in data['ut']])
+
+ km_in_re = 6374.4
+
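+ # assemble the spacecraft position columns into an (n_times, 3) array, in km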
+ xyz = np.array([[data['sc_pos_x']], [data['sc_pos_y']], [data['sc_pos_z']]]).transpose([2, 0, 1]).squeeze()
+ xyz = np.float64(xyz)
+ xyz_re = xyz/km_in_re
+ r_theta_phi = xyz_to_polar(xyz)
+ rr = r_theta_phi[:, 0]
+ th = r_theta_phi[:, 1]
+ ph = r_theta_phi[:, 2]
+ store_data(prefix + 'geo', data={'x': unix_times, 'y': xyz_re})
+ store_data(prefix + 'gdlat', data={'x': unix_times, 'y': np.float64(data['gclat'])})
+ store_data(prefix + 'gdlon', data={'x': unix_times, 'y': np.float64(data['gclon'])})
+ store_data(prefix + 'inv', data={'x': unix_times, 'y': np.float64(data['inv'])})
+ store_data(prefix + 'fmlat', data={'x': unix_times, 'y': np.float64(data['fmlat'])})
+ store_data(prefix + 'MLT', data={'x': unix_times, 'y': np.float64(data['mlt'])})
+ store_data(prefix + 'gcalt', data={'x': unix_times, 'y': rr / km_in_re})
+ store_data(prefix + 'gclat', data={'x': unix_times, 'y': th})
+ store_data(prefix + 'gclon', data={'x': unix_times, 'y': ph})
+ options(prefix + 'geo', 'ytitle', 'GEO')
+ options(prefix + 'geo', 'ysubtitle', '[Re]')
+ options(prefix + 'gdlat', 'ytitle', 'Geodetic latitude of the magnetic footprint')
+ options(prefix + 'gdlat', 'ysubtitle', '(120km altitude) [deg]')
+ options(prefix + 'gdlon', 'ytitle', 'Geodetic longitude of the magnetic footprint')
+ options(prefix + 'gdlon', 'ysubtitle', '(120km altitude) [deg]')
+ options(prefix + 'inv', 'ytitle', 'Invariant Latitude of the magnetic footprint')
+ options(prefix + 'inv', 'ysubtitle', '(120km altitude) [deg]')
+ options(prefix + 'fmlat', 'ytitle', 'Geomagnetic Latitude of the magnetic footprint')
+ options(prefix + 'fmlat', 'ysubtitle', '(120km altitude) [deg]')
+ options(prefix + 'MLT', 'ytitle', 'Magnetic Local Time')
+ options(prefix + 'MLT', 'ysubtitle', '[hours]')
+ options(prefix + 'gcalt', 'ytitle', 'Geocentric Altitude')
+ options(prefix + 'gcalt', 'ysubtitle', '[Re]')
+ options(prefix + 'gclat', 'ytitle', 'Geocentric Latitude')
+ options(prefix + 'gclat', 'ysubtitle', '[deg]')
+ options(prefix + 'gclon', 'ytitle', 'Geocentric Longitude')
+ options(prefix + 'gclon', 'ysubtitle', '[deg]')
+
+ return [prefix + 'geo',
+ prefix + 'gdlat',
+ prefix + 'gdlon',
+ prefix + 'inv',
+ prefix + 'fmlat',
+ prefix + 'MLT',
+ prefix + 'gcalt',
+ prefix + 'gclat',
+ prefix + 'gclon']
+
+
+def load_csv_file(filenames, cols=None):
+ """
+ Loads a list of CSV/txt files into pandas data frames
+ """
+ if not isinstance(filenames, list):
+ filenames = [filenames]
+ df = pd.concat((pd.read_csv(f, header=0, delim_whitespace=True, dtype=str, names=cols) for f in filenames), ignore_index=True)
+ return df
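+
+# Note: only the PWS product is distributed as CDF (see load.py); the RDM and
+# orbit products are plain ASCII, which is why they are parsed with pandas
+# above rather than via cdf_to_tplot.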
diff --git a/pyspedas/akebono/config.py b/pyspedas/akebono/config.py
new file mode 100644
index 00000000..2084e2f9
--- /dev/null
+++ b/pyspedas/akebono/config.py
@@ -0,0 +1,12 @@
+import os
+
+CONFIG = {'local_data_dir': 'akebono_data/',
+ 'remote_data_dir': 'http://darts.isas.jaxa.jp/stp/data/exosd/'}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'akebono'])
+
+if os.environ.get('AKEBONO_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.environ['AKEBONO_DATA_DIR']
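+
+# Illustrative example (not from the source): with SPEDAS_DATA_DIR=/data and
+# AKEBONO_DATA_DIR unset, Akebono files are cached under /data/akebono; when
+# AKEBONO_DATA_DIR is also set, it takes precedence.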
+
\ No newline at end of file
diff --git a/pyspedas/akebono/load.py b/pyspedas/akebono/load.py
new file mode 100644
index 00000000..966b318f
--- /dev/null
+++ b/pyspedas/akebono/load.py
@@ -0,0 +1,73 @@
+import logging
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+
+def load(trange=['2012-10-01', '2012-10-02'],
+ instrument='pws',
+ datatype='epd',
+ level='l2',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Akebono mission; this function is not meant
+ to be called directly; instead, see the wrappers:
+
+ pyspedas.akebono.pws
+ pyspedas.akebono.rdm
+ pyspedas.akebono.orb
+
+ """
+ prefix = ''
+
+ if instrument == 'pws':
+ # only PWS data are available in CDF files
+ prefix = 'akb_pws_'
+ pathformat = instrument + '/NPW-DS/%Y/ak_h1_pws_%Y%m%d_v??.cdf'
+ elif instrument == 'rdm':
+ prefix = 'akb_rdm_'
+ pathformat = instrument + '/%Y/sf%y%m%d'
+ elif instrument == 'orb':
+ prefix = 'akb_orb_'
+ pathformat = 'orbit/daily/%Y%m/ED%y%m%d.txt'
+ else:
+ logging.error('Unknown instrument: ' + instrument)
+ return
+
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange)
+
+ out_files = []
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly or instrument != 'pws':
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, prefix=prefix, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
+
+ if notplot:
+ return tvars
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
+
+
\ No newline at end of file
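
A hedged usage sketch of the wrapper entry points named in the docstring (trange values illustrative); only PWS goes through cdf_to_tplot, while the orbit data are parsed from daily text files into variables like akb_orb_geo:

    import pyspedas
    from pytplot import tplot

    pws_vars = pyspedas.akebono.pws(trange=['2012-10-01', '2012-10-02'])
    tplot('akb_pws_RX1')

    orb_vars = pyspedas.akebono.orb(trange=['2012-10-01', '2012-10-02'])
    tplot('akb_orb_geo')
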
diff --git a/pyspedas/akebono/tests/__init__.py b/pyspedas/akebono/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/akebono/tests/tests.py b/pyspedas/akebono/tests/tests.py
new file mode 100644
index 00000000..37be8e5c
--- /dev/null
+++ b/pyspedas/akebono/tests/tests.py
@@ -0,0 +1,37 @@
+import os
+import unittest
+from pytplot import data_exists
+import pyspedas
+
+
+class LoadTestCases(unittest.TestCase):
+ def test_load_pws_data(self):
+ out_vars = pyspedas.akebono.pws(time_clip=True)
+ self.assertTrue(data_exists('akb_pws_RX1'))
+ self.assertTrue(data_exists('akb_pws_RX2'))
+
+ def test_load_rdm_data(self):
+ out_vars = pyspedas.akebono.rdm()
+ self.assertTrue(data_exists('akb_L'))
+ self.assertTrue(data_exists('akb_MLT'))
+ self.assertTrue(data_exists('akb_rdm_FEIO'))
+
+ def test_load_orb_data(self):
+ out_vars = pyspedas.akebono.orb()
+ self.assertTrue(data_exists('akb_orb_geo'))
+ self.assertTrue(data_exists('akb_orb_gdlat'))
+ self.assertTrue(data_exists('akb_orb_gdlon'))
+
+ def test_load_notplot(self):
+ out_vars = pyspedas.akebono.pws(notplot=True)
+ self.assertTrue('akb_pws_RX1' in out_vars)
+
+ def test_downloadonly(self):
+ files = pyspedas.akebono.pws(downloadonly=True, trange=['2012-10-01', '2012-10-02'])
+ self.assertTrue(os.path.exists(files[0]))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+
\ No newline at end of file
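
These cases download data on the fly, so a sketch for running just this suite programmatically:

    import unittest
    from pyspedas.akebono.tests.tests import LoadTestCases

    suite = unittest.defaultTestLoader.loadTestsFromTestCase(LoadTestCases)
    unittest.TextTestRunner(verbosity=2).run(suite)
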
diff --git a/pyspedas/analysis/avg_data.py b/pyspedas/analysis/avg_data.py
index b85e2425..57d905ab 100644
--- a/pyspedas/analysis/avg_data.py
+++ b/pyspedas/analysis/avg_data.py
@@ -6,13 +6,14 @@
Similar to avg_data.pro in IDL SPEDAS.
"""
+import logging
import numpy as np
-import pyspedas
-import pytplot
+from pyspedas import tnames, time_float
+from pytplot import store_data, get_data
-def avg_data(names, dt=None, width=60, noremainder=False,
- new_names=None, suffix=None, overwrite=None):
+def avg_data(names, trange=[], res=None, width=None,
+ new_names=None, suffix=None, overwrite=False):
"""
Get a new tplot variable with averaged data.
@@ -20,38 +21,41 @@ def avg_data(names, dt=None, width=60, noremainder=False,
----------
names: str/list of str
List of pytplot names.
- dt: float, optional
- Time window in seconds for averaging data. It can be less than 1 sec.
+ trange: list of float, optional
+ Start time, end time.
+ If empty, the data start and end time will be used.
+ res: float, optional
+ Time resolution in seconds for averaging data.
+ It can be less than 1 sec.
+ Default is 60 sec.
width: int, optional
Number of values for the averaging window.
- Default is 60 points (usually this means 60 seconds).
- If dt is set, then width is ignored.
- noremainder: boolean, optional
- If True, the remainter (last part of data) will not be included.
- If False. the remainter will be included.
+ If res is set, then width is ignored.
new_names: str/list of str, optional
List of new_names for pytplot variables.
If not given, then a suffix is applied.
suffix: str, optional
- A suffix to apply. Default is '-avg'.
+ A suffix to apply.
+ Default is '-avg'.
overwrite: bool, optional
Replace the existing tplot name.
+ Default is False.
Returns
-------
None.
"""
- old_names = pyspedas.tnames(names)
+ old_names = tnames(names)
- if len(old_names) < 1:
- print('avg_data error: No pytplot names were provided.')
+ if names is None or len(old_names) < 1:
+ logging.error('avg_data error: No pytplot names were provided.')
return
if suffix is None:
suffix = '-avg'
- if overwrite is not None:
+ if overwrite:
n_names = old_names
elif new_names is None:
n_names = [s + suffix for s in old_names]
@@ -67,93 +71,134 @@ def avg_data(names, dt=None, width=60, noremainder=False,
for old_idx, old in enumerate(old_names):
new = n_names[old_idx]
- d = pytplot.data_quants[old].copy()
- data = d.values
- time = d.time.values
+ # Get times and data
+ d = get_data(old)
+ metadata = get_data(old, metadata=True)
+ time = d[0]
+ time = np.array(time_float(time))
+ time_len = len(time)
+
+ data = np.array(d[1])
dim = data.shape
dim0 = dim[0]
+ if dim0 != time_len:
+ logging.error('avg_data: Data and time length mismatch.')
+ continue
if len(dim) < 2:
dim1 = 1
else:
dim1 = dim[1]
- new_data = []
- new_time = []
- if dt is None:
- # Use width
- width = int(width)
- # print(dim0, width)
- for i in range(0, dim0, width):
- last = (i + width) if (i + width) < dim0 else dim0
- # idx = int(i + width/2) # redefined below before it's ever used?
- if (i + width > dim0) and noremainder:
- continue # Skip the last part of data.
- else:
- idx = int((i + last - 1)/2) # Include the last part.
- new_time.append(time[idx])
-
- if dim1 < 2:
- nd0 = np.average(data[i:last])
+ # Data may also contain v, v1, v2, v3
+ process_energies = []
+ retain_energies = []
+ for i in range(len(d)):
+ if i > 1:
+ if len(d[i]) == len(time):
+ process_energies.append(i)
else:
- nd0 = []
- for j in range(dim1):
- nd0.append(np.average(data[i:last, j]))
- new_data.append(nd0)
+ # These will be retained in the results as-is
+ retain_energies.append(i)
+ process_v = {}
+ for i in process_energies:
+ process_v[d._fields[i]] = []
+
+ # Find start and end times
+ if trange is not None:
+ trange = time_float(trange)
+ if len(trange) == 2 and trange[0] < trange[1]:
+ time_start = trange[0]
+ time_end = trange[1]
+ else:
+ time_start = time[0]
+ time_end = time[-1]
+
+ if time_start < time[0]:
+ time_start = time[0]
+ if time_end > time[-1]:
+ time_end = time[-1]
+
+ # Check for empty set
+ count_in_range = len(time[(time >= time_start) & (time <= time_end)])
+ if time_end <= time_start or count_in_range < 2:
+ logging.error('avg_data: No time values in provided time range.')
+ continue
+
+ # Find time bins
+
+ time_duration = time_end - time_start
+ if res is None and width is None:
+ res = 60 # Default is 60 sec
+
+ if res is not None:
+ # Given the resolution, compute bins
+ dt = res
+ bin_count = int(time_duration/dt)
+ ind = np.floor((time-time_start)/dt)
+ else:
+ # Given the width, compute bins
+ bins = np.arange(count_in_range)
+ ind = np.floor(bins/width)
+ bin_count = int(count_in_range/width)
+ dt = time_duration/bin_count
+
+ if bin_count < 2:
+ msg = 'avg_data: too few bins. Bins=' + str(bin_count) \
+ + ', Data points=' + str(count_in_range)
+ logging.error(msg)
+ continue
+
+ # Split time into bins
+ mdt = (time_end-time_start)/dt
+ if (mdt-int(mdt) >= 0.5):
+ max_ind = np.ceil(mdt)
else:
- # Use dt
- dt = float(dt)
- timedbl = np.array(pyspedas.time_float(time))
- alldt = timedbl[-1] - timedbl[0]
- if not dt > 0.0:
- print("avg_data: Time interval dt<=0.0. Exiting.")
- return
- if dt > alldt:
- print("avg_data: Time interval dt is too large. Exiting.")
- return
-
- # Find bins for time: equal bins of length dt.
- bincount = int(alldt/dt)
- if alldt % dt > 0.0 and not noremainder: # residual bin
- # Include the last bin which might not be the same size.
- bincount += 1
-
- time0 = timedbl[0]
- maxtime = timedbl[-1]
- for i in range(bincount):
- time1 = time0 + dt
- bintime = time0 + dt/2.0
- if bintime > maxtime:
- bintime = maxtime
- new_time.append(bintime)
- # Find all indexes between time0 and time1.
- idx = np.where((timedbl >= time0) & (timedbl < time1))
-
- # Check if idx is empty, ie. there is a gap in data.
- idx_is_empty = False
- if not idx:
- idx_is_empty = True
- elif len(idx) == 1:
- if len(idx[0]) == 0:
- idx_is_empty = True
-
- if dim1 < 2:
- if idx_is_empty: # Empty list.
- nd0 = np.nan
- else:
- nd0 = np.average(data[idx])
+ max_ind = np.floor(mdt)
+ w1 = np.asarray(ind < 0).nonzero()
+ ind[w1] = -1
+ w2 = np.asarray(ind >= max_ind).nonzero()
+ ind[w2] = -1
+
+ # Find new times
+ mx = np.max(ind)+1
+ new_times = (np.arange(mx)+0.5)*dt + time_start
+
+ # Find new data
+ new_data = []
+ for i in range(int(max_ind)):
+ if i < 0:
+ continue
+
+ idx0 = np.asarray(ind == i).nonzero()
+ isempty = len(idx0[0]) < 1
+
+ if dim1 < 2:
+ nd0 = np.nan if isempty else np.average(data[idx0])
+ else:
+ nd0 = []
+ for j in range(dim1):
+ nd0.append(np.nan if isempty else np.average(data[idx0, j]))
+ new_data.append(nd0)
+
+ for i in process_energies:
+ # The following processes v, v1, v2, v3
+ dime1 = len(d[i][0])
+ if dime1 < 2:
+ nd1 = np.nan if isempty else np.average(d[i][idx0])
else:
- nd0 = []
- for j in range(dim1):
- if idx_is_empty: # Empty list.
- nd0.append(np.nan)
- else:
- nd0.append(np.average(data[idx, j]))
- new_data.append(nd0)
- time0 = time1
-
- pytplot.store_data(new, data={'x': new_time, 'y': new_data})
- # copy attributes
- pytplot.data_quants[new].attrs = d.attrs.copy()
-
- print('avg_data was applied to: ' + new)
+ nd1 = []
+ for j in range(dime1):
+ nd1.append(np.nan if isempty else np.average(d[i][idx0, j]))
+ process_v[d._fields[i]].append(nd1)
+
+ # Create the new pytplot variable
+ data_dict = {'x': new_times, 'y': new_data}
+ for i in retain_energies:
+ data_dict[d._fields[i]] = d[i]
+ for i in process_energies:
+ data_dict[d._fields[i]] = process_v[d._fields[i]]
+
+ store_data(new, data=data_dict, attr_dict=metadata)
+
+ logging.info('avg_data was applied to: ' + new)
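
A quick sketch of the new res-based binning on synthetic data (variable name hypothetical): each sample lands in bin floor((t - t_start)/res), and each output timestamp is a bin center.

    import numpy as np
    from pytplot import store_data, get_data
    from pyspedas import avg_data

    # six 1 Hz samples starting at t=0 (numeric seconds pass through time_float)
    store_data('demo', data={'x': np.arange(6.),
                             'y': np.array([3., 5., 8., -4., 20., 1.])})

    avg_data('demo', res=2.0)            # bins [0,2), [2,4), [4,6)
    times, vals = get_data('demo-avg')   # default '-avg' suffix
    # times -> bin centers [1., 3., 5.]; vals -> [4., 2., 10.5]
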
diff --git a/pyspedas/analysis/clean_spikes.py b/pyspedas/analysis/clean_spikes.py
index ad4f8421..1a4601f2 100644
--- a/pyspedas/analysis/clean_spikes.py
+++ b/pyspedas/analysis/clean_spikes.py
@@ -6,11 +6,8 @@
Similar to clean_spikes.pro in IDL SPEDAS.
"""
-import numpy as np
-import pyspedas
+import logging
import pytplot
-from pyspedas.analysis.subtract_average import subtract_average
-from pyspedas.analysis.tsmooth import tsmooth
def clean_spikes(names, nsmooth=10, thresh=0.3, sub_avg=False,
@@ -42,72 +39,7 @@ def clean_spikes(names, nsmooth=10, thresh=0.3, sub_avg=False,
None.
"""
- old_names = pyspedas.tnames(names)
-
- if len(old_names) < 1:
- print('clean_spikes error: No pytplot names were provided.')
- return
-
- if suffix is None:
- suffix = '-despike'
-
- if overwrite is not None:
- n_names = old_names
- elif new_names is None:
- n_names = [s + suffix for s in old_names]
- else:
- n_names = new_names
-
- if isinstance(n_names, str):
- n_names = [n_names]
-
- if len(n_names) != len(old_names):
- n_names = [s + suffix for s in old_names]
-
- for old_idx, old in enumerate(old_names):
- new = n_names[old_idx]
- tmp = new + '_tmp_data'
-
- # Create new
- if old != new:
- pyspedas.tcopy(old, new)
-
- # Perform subtract_average or just copy the values
- if sub_avg:
- subtract_average(new, new_names=tmp)
- else:
- pyspedas.tcopy(new, tmp)
-
- # Find spikes
- tmps = tmp + '-s'
- tsmooth(tmp, new_names=tmps, width=nsmooth)
- ds0 = pytplot.get_data(tmps) # smoothed out values
- ds = ds0[1]
- dor0 = pytplot.get_data(tmp) # original values
- d0 = dor0[1]
- dn = d0.copy() # final values
-
- dim = dn.shape
- if len(dim) == 1:
- # One dim data.
- for i in range(dim[0]):
- # compare smoothed out values to original values
- if abs(d0[i] - ds[i]) > thresh * abs(ds[i]):
- dn[i] = np.NaN # for spikes, set to NaN
- else:
- # More than one dim data.
- for j in range(dim[1]):
- # print("j = ", j)
- for i in range(dim[0]):
- # compare smoothed out values to original values
- if abs(d0[i, j] - ds[i, j]) > thresh * abs(ds[i, j]):
- dn[i, j] = np.NaN # for spikes, set to NaN
-
- # pytplot.data_quants[new] = d
- pytplot.replace_data(new, dn)
-
- # remove temp data
- del pytplot.data_quants[tmp]
- del pytplot.data_quants[tmps]
-
- print('clean_spikes was applied to: ' + new)
+ logging.info("clean_spikes has been moved to the pytplot.tplot_math package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ pytplot.tplot_math.clean_spikes(names=names, nsmooth=nsmooth, thresh=thresh, sub_avg=sub_avg, new_names=new_names,
+ suffix=suffix, overwrite=overwrite)
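
The wrapper above only logs and forwards, so new code should call the moved implementation directly. A hedged sketch (variable name hypothetical; the '-despike' suffix matches the default in the code removed above):

    import numpy as np
    import pytplot

    y = np.ones(100)
    y[50] = 100.                          # a single spike
    pytplot.store_data('demo_spiky', data={'x': np.arange(100.), 'y': y})

    pytplot.tplot_math.clean_spikes('demo_spiky', nsmooth=10, thresh=0.3)
    # output stored as 'demo_spiky-despike' with the spike replaced by NaN
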
diff --git a/pyspedas/analysis/deriv_data.py b/pyspedas/analysis/deriv_data.py
index cb736ab1..f95f6ce0 100644
--- a/pyspedas/analysis/deriv_data.py
+++ b/pyspedas/analysis/deriv_data.py
@@ -6,6 +6,10 @@
Similar to deriv_data.pro in IDL SPEDAS.
"""
+import logging
+
+import numpy as np
+
import pyspedas
import pytplot
@@ -35,7 +39,7 @@ def deriv_data(names, new_names=None, suffix=None, overwrite=None):
old_names = pyspedas.tnames(names)
if len(old_names) < 1:
- print('deriv_data error: No pytplot names were provided.')
+ logging.error('deriv_data error: No pytplot names were provided.')
return
if suffix is None:
@@ -55,13 +59,7 @@ def deriv_data(names, new_names=None, suffix=None, overwrite=None):
n_names = [s + suffix for s in old_names]
for i, old in enumerate(old_names):
- new = n_names[i]
-
- if new != old:
- pyspedas.tcopy(old, new)
-
- data = pytplot.data_quants[new]
- data_new = data.differentiate('time').copy()
- pytplot.data_quants[new].values = data_new.values
-
- print('deriv_data was applied to: ' + new)
+ data = pytplot.get_data(old)
+ data_grad = np.gradient(data.y, data.times, axis=0)
+ pytplot.store_data(n_names[i], data={'x': data.times, 'y': data_grad})
+ logging.info('deriv_data was applied to: ' + n_names[i])
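
deriv_data now differentiates with a NumPy gradient along the time axis rather than xarray's differentiate; a self-contained sketch of that computation:

    import numpy as np

    times = np.array([0., 1., 2., 3.])
    y = np.array([0., 1., 4., 9.])        # t**2 sampled at 1 s cadence
    dydt = np.gradient(y, times, axis=0)  # -> [1., 2., 4., 5.]
    # central differences in the interior, one-sided at both endpoints
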
diff --git a/pyspedas/analysis/dpwrspc.py b/pyspedas/analysis/dpwrspc.py
index 06a2c9d5..cd126d8e 100644
--- a/pyspedas/analysis/dpwrspc.py
+++ b/pyspedas/analysis/dpwrspc.py
@@ -6,6 +6,7 @@
Similar to dpwrspc.pro in IDL SPEDAS.
"""
+import logging
import numpy as np
@@ -64,10 +65,9 @@ def dpwrspc(time, quantity, nboxpoints=256, nshiftpoints=128, binsize=3,
# remove NaNs from the data
where_finite = np.where(np.isnan(quantity) == False)
-
quantity2process = quantity[where_finite[0]]
times2process = time[where_finite[0]]
- nboxpnts = nboxpoints
+ nboxpnts = int(nboxpoints)
nshiftpnts = nshiftpoints
totalpoints = len(times2process)
@@ -76,20 +76,20 @@ def dpwrspc(time, quantity, nboxpoints=256, nshiftpoints=128, binsize=3,
# test nspectra, if the value of nshiftpnts is much smaller than
# nboxpnts/2 strange things happen
- nbegin = np.array([nshiftpnts*i for i in range(nspectra)])
+ nbegin = np.array([nshiftpnts*i for i in range(nspectra)], dtype=np.int64)
nend = nbegin + nboxpnts
okspec = np.where(nend <= totalpoints-1)
if len(okspec[0]) <= 0:
- print('Not enough points for a calculation')
+ logging.error('Not enough points for a calculation')
return
tdps = np.zeros(nspectra)
nfreqs = int(int(nboxpnts/2)/binsize)
if nfreqs <= 1:
- print('Not enough frequencies for a calculation')
+ logging.error('Not enough frequencies for a calculation')
return
dps = np.zeros([nspectra, nfreqs])
@@ -99,14 +99,14 @@ def dpwrspc(time, quantity, nboxpoints=256, nshiftpoints=128, binsize=3,
nbegin = int(nthspectrum*nshiftpnts)
nend = nbegin + nboxpnts
- if nend <= totalpoints-1:
+ if nend <= totalpoints:
t = times2process[nbegin:nend]
t0 = t[0]
t = t - t0
x = quantity2process[nbegin:nend]
# Use center time
- tdps[nthspectrum] = (times2process[nbegin]+times2process[nend])/2.0
+ tdps[nthspectrum] = (times2process[nbegin]+times2process[nend-1])/2.0
if noline is False:
coef = np.polyfit(t, x, 1)
@@ -120,7 +120,7 @@ def dpwrspc(time, quantity, nboxpoints=256, nshiftpoints=128, binsize=3,
bign = nboxpnts
if bign % 2 != 0:
- print('dpwrspc: needs an even number of data points,\
+ logging.warning('dpwrspc: needs an even number of data points,\
dropping last point...')
t = t[0:bign-1]
x = x[0:bign-1]
@@ -184,4 +184,4 @@ def dpwrspc(time, quantity, nboxpoints=256, nshiftpoints=128, binsize=3,
dps[nthspectrum, :] = power
fdps[nthspectrum, :] = freqcenter
- return (tdps, fdps, dps)
+ return tdps, fdps, dps
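
Since dpwrspc operates on plain arrays, it can be exercised without tplot variables; a hedged sketch on a synthetic sine wave (parameters illustrative):

    import numpy as np
    from pyspedas.analysis.dpwrspc import dpwrspc

    t = np.arange(1024, dtype=float)        # 1 Hz cadence
    x = np.sin(2 * np.pi * 0.05 * t)        # 0.05 Hz line
    tdps, fdps, dps = dpwrspc(t, x, nboxpoints=256, nshiftpoints=128)
    # tdps: center time of each window; fdps: bin-averaged frequencies;
    # dps: power per (window, frequency) cell, peaking near 0.05 Hz
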
diff --git a/pyspedas/analysis/lingradest.py b/pyspedas/analysis/lingradest.py
index 9333deba..3a90887f 100644
--- a/pyspedas/analysis/lingradest.py
+++ b/pyspedas/analysis/lingradest.py
@@ -1,5 +1,7 @@
+import logging
import numpy as np
+
def lingradest(Bx1, Bx2, Bx3, Bx4,
By1, By2, By3, By4,
Bz1, Bz2, Bz3, Bz4,
@@ -41,7 +43,7 @@ def lingradest(Bx1, Bx2, Bx3, Bx4,
len(By1) != datarrLength or len(By2) != datarrLength or len(By3) != datarrLength or len(By4) != datarrLength or \
len(Bz1) != datarrLength or len(Bz2) != datarrLength or len(Bz3) != datarrLength or len(Bz4) != datarrLength or \
R1.shape[0] != datarrLength or R2.shape[0] != datarrLength or R3.shape[0] != datarrLength or R4.shape[0] != datarrLength:
- print('Problem with input sizes; all input data should be interpolated to the same time stamps')
+ logging.error('Problem with input sizes; all input data should be interpolated to the same time stamps')
return
Rb = np.zeros((datarrLength, 3))
@@ -152,9 +154,9 @@ def lingradest(Bx1, Bx2, Bx3, Bx4,
RcurvB[i] = curvB[i]**(-1)
- print('Calculations completed')
+ logging.info('Calculations completed')
return {'Bxbc': Bxbc, 'Bybc': Bybc, 'Bzbc': Bzbc, 'Bbc': Bbc,
'LGBx': LGBx, 'LGBy': LGBy, 'LGBz': LGBz,
'LCxB': LCxB, 'LCyB': LCyB, 'LCzB': LCzB, 'LD': LD,
- 'curv_x_B': curv_x_B, 'curv_y_B': curv_y_B, 'curv_z_B': curv_z_B, 'RcurvB': RcurvB}
\ No newline at end of file
+ 'curv_x_B': curv_x_B, 'curv_y_B': curv_y_B, 'curv_z_B': curv_z_B, 'RcurvB': RcurvB}
diff --git a/pyspedas/analysis/neutral_sheet.py b/pyspedas/analysis/neutral_sheet.py
new file mode 100644
index 00000000..5f06adef
--- /dev/null
+++ b/pyspedas/analysis/neutral_sheet.py
@@ -0,0 +1,532 @@
+import logging
+import numpy as np
+from geopack.geopack import recalc as geopack_recalc
+from pyspedas import cotrans, time_double
+
+
+def sm_ns_model(time, gsm_pos, sc2NS=False):
+ """
+ This routine calculates the NS position along the z-axis at a specific x and y location.
+ """
+ # convert gsm to sm coordinates
+ sm_pos = cotrans(time_in=time, data_in=gsm_pos, coord_in='gsm', coord_out='sm')
+ zns = gsm_pos[:, 2] - sm_pos[:, 2]
+ if not sc2NS:
+ return zns
+ else:
+ sc2NS = gsm_pos[:, 2] - zns
+ return sc2NS
+
+
+def themis_ns_model(time, gsm_pos, sc2NS=False):
+ """
+ NAME:
+ themis_ns_model
+ PURPOSE:
+ This routine calculates the position along the z-axis at a specific
+ x and y location. The themis model is used for this calculation.
+ The themis model uses z-sm (converted from z-gsm) for the inner probes
+ and the Hammond model for the outer probes.
+ INPUT:
+ time - string or double format
+ double(s) seconds since 1970
+ string(s) format: YYYY-MM-DD/hh:mm:ss
+ gsm_pos - position vector in GSM coordinates in re (pos[*,3])
+ OUTPUT: returns Z displacement of the neutral sheet above or below the XY plane in Re (zgsm of the NS)
+ Value is positive if NS is above z=0 gsm plane, negative if below
+ KEYWORDS
+ sc2NS - if set returns Z displacement from the spacecraft to the neutral sheet
+ Value is positive if the NS is northward of the SC location, and negative if below
+ NOTES:
+ Reference:
+ The themis model uses z-sm (converted from z-gsm) for the inner probes
+ and the Hammond model (default) for the outer probes. The algorithm can be found
+ in ssllib neutralsheet.pro.
+ HISTORY:
+ """
+ # initialize constants and variables
+ re = 6378.
+ h0 = 8.6 # 10.5 # hinge point of the neutral sheet
+ rad = np.pi/180.
+ dz2NS = np.zeros(len(time))
+
+ # constants used in hammond model
+ H1=8.6
+ Y0=20.2
+ D=12.2
+
+ # calculate the radial distance
+ rdist = np.sqrt(gsm_pos[:,0]**2 + gsm_pos[:,1]**2 + gsm_pos[:,2]**2)
+
+ # Use the sm coordinates for radial distances <= h0 (8.6)
+ sm_ind = np.argwhere(rdist <= h0).flatten()
+ if len(sm_ind) > 0:
+ sm_pos = cotrans(time_in=time[sm_ind], data_in=gsm_pos[sm_ind,:], coord_in='gsm', coord_out='sm')
+ dz2NS[sm_ind] = -sm_pos[:, 2]
+
+ # Use the Hammond model for radial distances > h0 (8.6)
+ lr_ind = np.argwhere(rdist > h0).flatten()
+ if len(lr_ind) > 0:
+ # initialize variables
+ x = gsm_pos[lr_ind, 0]
+ y = gsm_pos[lr_ind, 1]
+ z = gsm_pos[lr_ind, 2]
+ tilt = np.zeros(len(x))
+ # check input time format and convert to doy, hr, min
+ for i in range(len(x)):
+ # calculate the tilt in degrees
+ tilt[i] = geopack_recalc(time_double(time[lr_ind[i]]))
+
+ # hammond model
+ iless = np.argwhere(np.abs(y) < Y0).flatten()
+ if len(iless) > 0:
+ dz2NS[lr_ind[iless]] = ((H1+D)*np.sqrt(1-y[iless]**2/Y0**2)-D)*np.sin(tilt[iless])
+
+ imore = np.argwhere(np.abs(y) >= Y0).flatten()
+ if len(imore) > 0:
+ dz2NS[lr_ind[imore]] = -D*np.sin(tilt[imore])
+
+ if not sc2NS:
+ return gsm_pos[:, 2] - (-dz2NS)
+ else:
+ return -dz2NS
+
+
+def aen_ns_model(time, gsm_pos, sc2NS=False):
+ """
+ NAME:
+ aen_ns_model
+
+ PURPOSE: This program finds the AEN (Analytical Equatorial Neutral) sheet inside the
+ magnetopause at different times and positions
+
+ INPUT:
+ time - string or double format
+ double(s) seconds since 1970
+ string(s) format: YYYY-MM-DD/hh:mm:ss
+ gsm_pos - position vector in GSM coordinates in re (pos[*,3])
+
+ OUTPUT: returns Z displacement of the neutral sheet above or below the XY plane in Re (zgsm of the NS)
+ Value is positive if NS is above z=0 gsm plane, negative if below
+
+ KEYWORDS
+ sc2NS - if set returns Z displacement from the spacecraft to the neutral sheet
+ Value is positive if the NS is northward of the SC location, and negative if below
+
+ NOTES:
+
+ References:
+ (1) AEN(Analytical Equatorial Neutral):
+ Zhu, M. and R.-L. Xu, 1994, A continuous neutral sheet model and a normal
+ curved coordinate system in the magnetotail, Chinese J. Space Science, 14,
+ (4)269, (in Chinese).
+ Wang, Z.-D. and R.-L. Xu, Neutral Sheet Observed on ISEE Satellite,
+ Geophysical Research Letter, 21, (19)2087, 1994.
+ (2) Magnetopause model:
+ Sibeck, D. G., R. E. Lopez, and E. C. Roelof, Solar wind control of the
+ magnetopause shape, location, and motion, J. Geophys. Res., 96, 5489, 1991
+
+ HISTORY:
+
+ """
+ # initialize constants
+ h0 = 12.6/np.pi
+
+ dz2ns = np.zeros(len(time))
+
+ for i in range(len(time)):
+ # calculate the tilt angle
+ tt = geopack_recalc(time_double(time[i]))
+
+ # calculate the position of the neutral sheet
+ dz2ns[i] = -h0 * np.sin(tt) * np.arctan(gsm_pos[i,0]/5) * (2*np.cos(gsm_pos[i,1]/6))
+
+ if not sc2NS:
+ return dz2ns
+ else:
+ return gsm_pos[:, 2]-dz2ns
+
+
+def den_ns_model(time, gsm_pos, sc2NS=False):
+ """
+ NAME:
+ den_ns_model
+ PURPOSE:
+ This program finds the DEN (Displaced Equatorial Neutral) sheet inside
+ the magnetopause at different times and positions. The routine calculates
+ the position along the z-axis at a specific location.
+ INPUT:
+ time - string or double format
+ double(s) seconds since 1970
+ string(s) format: YYYY-MM-DD/hh:mm:ss
+ gsm_pos - position vector in GSM coordinates in re (pos[*,3])
+ OUTPUT: returns Z displacement of the neutral sheet above or below the XY plane in Re (zgsm of the NS)
+ Value is positive if NS is above z=0 gsm plane, negative if below
+ KEYWORDS
+ sc2NS - if set returns Z displacement from the spacecraft to the neutral sheet
+ Value is positive if the NS is northward of the SC location, and negative if below
+ NOTES:
+ References:
+ (1) DEN(Displaced Equatorial Neutral):
+ Xu, R.-L., A Displaced Equatorial Neutral Sheet Surface Observed on ISEE-2
+ Satellite, J. Atmospheric and Terrestrial Phys., 58, 1085, 1991
+ (2) Magnetopause model:
+ Sibeck, D. G., R. E. Lopez, and E. C. Roelof, Solar wind control of the
+ magnetopause shape, location, and motion, J. Geophys. Res., 96, 5489, 1991
+ Original Authors of the FORTRAN source code:
+ Ronglan XU and Lei LI, Center for Space Sci. and Applied Res.,
+ Chinese Academy of Sciences, PO Box 8701, Beijing 100080, China
+ E-mail: XURL@SUN.IHEP.AC.CN, XURL@SUN20.CSSAR.AC.CN
+ This source code was ported from the original FORTRAN source code into IDL
+ The original source code only calculated to 10.05 RE. In this IDL version
+ that restriction was increased to 25.
+ HISTORY:
+ """
+ # calculate the position of the neutral sheet along z axis
+ H = 25.5
+ H1 = 25.05
+
+ dz2ns = np.zeros(len(time))
+
+ for i in range(len(time)):
+ done = 0
+ xgsm = gsm_pos[i,0]
+ ygsm = gsm_pos[i,1]
+
+ # get tilt angle of magnetic pole
+ tilt = geopack_recalc(time_double(time[i]))
+
+ if xgsm > -100.:
+ d = sd1(tilt, H, H1, xgsm)
+ ym21 = ((H1*(H+d))**2) * (1-(xgsm/(H*np.cos(tilt)))**2)
+ ym22 = (H+d)**2 - (d-xgsm/np.cos(tilt))**2
+ ym2 = ym21/ym22
+ if ym2 < 0:
+ #ie[i] = 2
+ continue
+ ym = np.sqrt(ym2)
+ xd2 = ((H*np.cos(tilt))**2) * (1-(ygsm/H1)**2)
+ if np.abs(ygsm) > H1:
+ xd2 = 0
+ # find the equatorial region
+ xd = np.sqrt(xd2)
+ rd = np.sqrt(xd**2+ygsm**2)
+ rsm = np.sqrt(xgsm**2+ygsm**2)
+ if xgsm > 0 or rsm <= rd:
+ dz2ns[i] = -xgsm*np.sin(tilt)/np.cos(tilt)
+ done = 1
+ if np.abs(ygsm) > ym and done != 1:
+ dz2ns[i] = -d*np.sin(tilt)
+ done = 1
+ if done != 1:
+ dz2ns[i] = ((H+d)*np.sqrt(1-(ygsm**2)/ym2)-d)*np.sin(tilt)
+
+ if not sc2NS:
+ return dz2ns
+ else:
+ sc2NS = gsm_pos[:,2] - dz2ns
+ return sc2NS
+
+
+def sfa4(aa, bb, cc, dd):
+ ndx = 0
+ xmin = 0
+ xmax = 50
+ ndxmax = 3
+ dx = 1
+ x = xmin
+ yy = x**4 + aa*x**3 + bb*x**2 + cc*x + dd
+
+ while ndx <= ndxmax:
+ x = x+dx
+ if x >= xmax:
+ ndx = 0
+ return x
+ y = x**4 + aa*x**3 + bb*x**2 + cc*x + dd
+ ry = y/yy
+ if ry < 0:
+ x = x-dx
+ dx = dx/10.
+ ndx = ndx+1
+ else:
+ yy = y
+ return x
+
+
+def sd1(til, H, H1, xgsm):
+ ct = np.cos(til)
+ xx = xgsm
+ xh = -H*ct
+ if xgsm >= xh:
+ xx = xh
+
+ # calculate the radius of the cross section
+ if xx <= -5.:
+ rm = 9*(10-3*xx)/(10-xx)+3
+ if xx > -5.:
+ rm = np.sqrt(18**2-(xx+5)**2)
+ rm2 = rm**2
+
+ # in cross_section areas above and below the neutral
+ # sheet
+ aa = 4*H-(32*rm2*H**2)/(np.pi**2*H1**2*(H-xx/ct))
+ bb = 2*H**2*(3.-8.*rm2/(np.pi**2*H1**2))
+ cc = 4*(H**3)
+ dd = H**4
+ x = sfa4(aa, bb, cc, dd)
+
+ d = x
+ if xgsm >= xh:
+ fk = -x/np.sqrt(-xh)
+ d = -fk*np.sqrt(-xgsm)
+ return d
+
+
+def fairfield_ns_model(time, gsm_pos, sc2NS=False):
+ """
+ NAME:
+ fairfield_NS_model
+
+ PURPOSE:
+ This routine calculates the position along the z-axis at a specific
+ x and y location. The Fairfield model is used for this calculation.
+
+ INPUT:
+ time - string or double format
+ double(s) seconds since 1970
+ string(s) format: YYYY-MM-DD/hh:mm:ss
+ gsm_pos - position vector in GSM coordinates in re (pos[*,3])
+
+ OUTPUT: returns Z displacement of the neutral sheet above or below the XY plane in Re (zgsm of the NS)
+ Value is positive if NS is above z=0 gsm plane, negative if below
+
+ KEYWORDS
+ sc2NS - if set returns Z displacement from the spacecraft to the neutral sheet
+ Value is positive if the NS is northward of the SC location, and negative if below
+
+ NOTES:
+ Reference:
+ A statistical determination of the shape and position of the
+ geomagnetic neutral sheet, Journal of Geophysical Research,
+ Vol. 85, No A2, pages 775-780, February 1, 1980
+ Author - D. Fairfield
+
+ HISTORY:
+ """
+
+ # constants (in re)
+ h0 = 10.5
+ y0 = 22.5
+ d = 14.
+
+ dz2NS = np.zeros(len(time))
+ tilt = np.zeros(len(time))
+
+ for i in range(len(time)):
+ # calculate tilt angle of geomagnetic axis
+ tilt[i] = geopack_recalc(time_double(time[i]))
+
+ # calculate the position of the neutral sheet along z axis
+ y_ge_y0 = np.argwhere(np.abs(gsm_pos[:,1]) >= y0).flatten()
+ y_lt_y0 = np.argwhere(np.abs(gsm_pos[:,1]) < y0).flatten()
+ if len(y_ge_y0) > 0:
+ dz2NS[y_ge_y0] = -d*np.sin(tilt[y_ge_y0])
+ if len(y_lt_y0) > 0:
+ dz2NS[y_lt_y0] = ((h0 + d) * np.sqrt(1 - gsm_pos[y_lt_y0,1]**2/y0**2) - d)*np.sin(tilt[y_lt_y0])
+
+ if not sc2NS:
+ return dz2NS
+ else:
+ sc2NS = gsm_pos[:, 2] - dz2NS
+ return sc2NS
+
+
+def den_fairfield_ns_model(time, gsm_pos, sc2NS=False):
+ """
+ NAME:
+ den_fairfield_ns_model
+ PURPOSE:
+ This routine calculates the position along the z-axis at a specific
+ x and y location.
+ INPUT:
+ time - string or double format
+ double(s) seconds since 1970
+ string(s) format: YYYY-MM-DD/hh:mm:ss
+ gsm_pos - position vector in GSM coordinates in re (pos[*,3])
+ OUTPUT: returns Z displacement of the neutral sheet above or below the XY plane in Re (zgsm of the NS)
+ Value is positive if NS is above z=0 gsm plane, negative if below
+ KEYWORDS
+ sc2NS - if set returns Z displacement from the spacecraft to the neutral sheet
+ Value is positive if the NS is northward of the SC location, and negative if below
+ HISTORY:
+ """
+ # initialize constants
+ dz2ns = np.zeros(len(time))
+
+ # Use the den model for radial distances <12.re
+ rdist = np.sqrt(gsm_pos[:,0]**2 + gsm_pos[:,1]**2 + gsm_pos[:,2]**2)
+ sm_ind = np.argwhere(rdist <= 10.).flatten()
+ if len(sm_ind) > 0:
+ dz2ns[sm_ind] = den_ns_model(time[sm_ind], gsm_pos[sm_ind,:])
+
+ # use the fairfield model for radial distances >12.re
+ lr_ind = np.argwhere(rdist > 10.).flatten()
+ if len(lr_ind) > 0:
+ dz2ns[lr_ind] = fairfield_ns_model(time[lr_ind], gsm_pos[lr_ind,:])
+
+ if not sc2NS:
+ return dz2ns
+ else:
+ sc2NS = gsm_pos[:,2] - dz2ns
+ return sc2NS
+
+
+def lopez_ns_model(time, gsm_pos, kp=None, mlt=None, sc2NS=False):
+ """
+ NAME:
+ lopez_ns_model
+ PURPOSE:
+ This routine calculates the position along the z-axis at a specific
+ x and y location. The Lopez model is used for this calculation.
+ INPUT:
+ time - string or double format
+ double(s) seconds since 1970
+ string(s) format: YYYY-MM-DD/hh:mm:ss
+ gsm_pos - position vector in GSM coordinates in re (pos[*,3])
+ kp - kp index value
+ mlt - magnetic local time in degrees (0=midnight)
+ OUTPUT: returns Z displacement of the neutral sheet above or below the XY plane in Re (zgsm of the NS)
+ Value is positive if NS is above z=0 gsm plane, negative if below
+ KEYWORDS
+ sc2NS - if set returns Z displacement from the spacecraft to the neutral sheet
+ Value is positive if the NS is northward of the SC location, and negative if below
+ NOTES:
+ Reference:
+ The position of the magnetotail neutral sheet in the near-Earth Region,
+ Geophysical Research Letters, Vol. 17, No 10, pages 1617-1620, 1990
+ Author - Ramon E. Lopez
+ The lopez model is best used for distances <8.8 RE
+ HISTORY:
+ """
+ # constants
+ rad = np.pi/180.
+ if kp is None:
+ kp = 0
+ if mlt is None:
+ mlt = 0.0
+ tilt = np.zeros(len(time))
+
+ for i in range(len(time)):
+ # calculate tilt angle of geomagnetic axis
+ tilt[i] = geopack_recalc(time_double(time[i]))
+
+ # calculate the position of the neutral sheet along z axis
+ rdist = np.sqrt(gsm_pos[:,0]**2 + gsm_pos[:,1]**2 + gsm_pos[:,2]**2)
+ mlat = -(0.14*kp + 0.69) * ((np.cos(rad*mlt))**.3333333) * (0.065*(rdist**0.8) - 0.16) * tilt * 180.0/np.pi
+ mlat = mlat + tilt * 180.0/np.pi
+
+ # convert magnetic latitude to position
+ x, y, z = rthph2xyz(rdist, mlat, mlt)
+
+ if not sc2NS:
+ return z
+ else:
+ sc2NS = gsm_pos[:,2] - z
+ return sc2NS
+
+
+def rthph2xyz(r,th,ph):
+ """
+ Helper function for the lopez model
+ converts spherical to cartesian coordinates
+ NOTE: th,ph in degrees, and th is latitude (not colatitude) (i.e. [-90->90])
+ """
+ FLAG=6.8792E+28
+ FLAG98=0.98*FLAG
+ PI=3.1415926535898
+
+ thrad=th*PI/180.
+ phrad=ph*PI/180.
+ sth=np.sin(thrad)
+ cth=np.cos(thrad)
+ sph=np.sin(phrad)
+ cph=np.cos(phrad)
+ x=r*cth*cph
+ y=r*cth*sph
+ z=r*sth
+
+ iflags=np.argwhere((r > FLAG98) | (th > FLAG98) | (ph > FLAG98)).flatten()
+ if (len(iflags) > 0):
+ x[iflags]=FLAG
+ y[iflags]=FLAG
+ z[iflags]=FLAG
+
+ return x,y,z
+
+
+def neutral_sheet(time, pos, kp=None, model='themis', mlt=None, in_coord='gsm', sc2NS=False):
+ """
+ Calculate the distance to the neutral sheet for a given time and position.
+
+ Parameters
+ ----------
+ time : array_like
+ Times of interest, as strings or seconds since 1970.
+ pos : array_like
+ Positions of interest in the in_coord system (N x 3, in Re).
+ kp : array_like, optional
+ Kp index.
+ model : str, optional
+ Neutral sheet model to use.
+ mlt : array_like, optional
+ Magnetic local time.
+ in_coord : str, optional
+ Coordinate system of the input position.
+ sc2NS : Bool, optional
+ Flag to return spacecraft to neutral sheet distance.
+
+ Returns
+ -------
+ distance2NS : array_like
+ Distance to the neutral sheet.
+
+ """
+
+ time = np.array(time)
+
+ # validate and initialize parameters if not set
+ if model is None:
+ model = 'themis'
+ else:
+ model = model.lower()
+ models = ['sm', 'themis', 'aen', 'den', 'fairfield', 'den_fairfield', 'lopez']
+ if model not in models:
+ logging.error('An invalid neutral sheet model name was used. Valid entries include: ')
+ logging.error(models)
+ return
+
+ # check input coordinate system, convert to gsm if needed
+ if in_coord is None:
+ in_coord = 'gsm'
+ else:
+ in_coord = in_coord.lower()
+ if in_coord == 'gsm':
+ gsm_pos = pos
+ else:
+ gsm_pos = cotrans(time_in=time, data_in=pos, coord_in=in_coord, coord_out='gsm')
+
+ # call the appropriate neutral sheet model
+ if model == 'sm':
+ return sm_ns_model(time, gsm_pos, sc2NS=sc2NS)
+ elif model == 'themis':
+ return themis_ns_model(time, gsm_pos, sc2NS=sc2NS)
+ elif model == 'aen':
+ return aen_ns_model(time, gsm_pos, sc2NS=sc2NS)
+ elif model == 'den':
+ return den_ns_model(time, gsm_pos, sc2NS=sc2NS)
+ elif model == 'fairfield':
+ return fairfield_ns_model(time, gsm_pos, sc2NS=sc2NS)
+ elif model == 'den_fairfield':
+ return den_fairfield_ns_model(time, gsm_pos, sc2NS=sc2NS)
+ elif model == 'lopez':
+ return lopez_ns_model(time, gsm_pos, kp=kp, mlt=mlt, sc2NS=sc2NS)
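
A hedged usage sketch of the new routine (times and GSM positions in Re are illustrative; the geopack package must be installed, as imported at the top of the file):

    import numpy as np
    from pyspedas.analysis.neutral_sheet import neutral_sheet

    times = np.array(['2015-10-16/13:00:00', '2015-10-16/13:01:00'])
    pos = np.array([[-8.0, 3.0, 1.0],
                    [-8.1, 3.1, 1.1]])

    # z of the neutral sheet relative to the z=0 GSM plane
    zns = neutral_sheet(times, pos, model='themis')

    # signed spacecraft-to-sheet distance instead
    sc2ns = neutral_sheet(times, pos, model='lopez', kp=2, mlt=0.0, sc2NS=True)
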
diff --git a/pyspedas/analysis/subtract_average.py b/pyspedas/analysis/subtract_average.py
index 25913878..703cbd12 100644
--- a/pyspedas/analysis/subtract_average.py
+++ b/pyspedas/analysis/subtract_average.py
@@ -6,9 +6,8 @@
Similar to tsub_average.pro in IDL SPEDAS.
"""
-import pyspedas
+import logging
import pytplot
-import numpy
def subtract_average(names, new_names=None, suffix=None, overwrite=None,
@@ -36,56 +35,6 @@ def subtract_average(names, new_names=None, suffix=None, overwrite=None,
None.
"""
- old_names = pyspedas.tnames(names)
-
- if len(old_names) < 1:
- print('Subtract Average error: No pytplot names were provided.')
- return
-
- if suffix is None:
- if median:
- suffix = '-m'
- else:
- suffix = '-d'
-
- if overwrite is not None:
- n_names = old_names
- elif new_names is None:
- n_names = [s + suffix for s in old_names]
- else:
- n_names = new_names
-
- if isinstance(n_names, str):
- n_names = [n_names]
-
- if len(n_names) != len(old_names):
- n_names = [s + suffix for s in old_names]
-
- old_names = pyspedas.tnames(names)
-
- for old_idx, old in enumerate(old_names):
- new = n_names[old_idx]
-
- if new != old:
- pyspedas.tcopy(old, new)
-
- data = pytplot.data_quants[new].values
- dim = data.shape
- if median:
- if len(dim) == 1:
- data -= numpy.median(data, axis=0)
- else:
- for i in range(dim[1]):
- data[:, i] -= numpy.median(data[:, i], axis=0)
- ptype = 'Median'
- else:
- if len(dim) == 1:
- data -= numpy.mean(data, axis=0)
- else:
- for i in range(dim[1]):
- data[:, i] -= numpy.mean(data[:, i], axis=0)
- ptype = 'Mean'
-
- pytplot.data_quants[new].values = data
-
- print('Subtract ' + ptype + ' was applied to: ' + new)
+ logging.info("subtract_average has been moved to the pytplot.tplot_math package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ pytplot.tplot_math.subtract_average(names=names, new_names=new_names, suffix=suffix, overwrite=overwrite, median=median)
diff --git a/pyspedas/analysis/subtract_median.py b/pyspedas/analysis/subtract_median.py
index 379e30d7..74f6e277 100644
--- a/pyspedas/analysis/subtract_median.py
+++ b/pyspedas/analysis/subtract_median.py
@@ -6,8 +6,8 @@
Similar to tsub_average.pro in IDL SPEDAS.
"""
-from .subtract_average import subtract_average
-
+import logging
+import pytplot
def subtract_median(names, new_names=None, suffix=None, overwrite=None):
"""
@@ -30,5 +30,7 @@ def subtract_median(names, new_names=None, suffix=None, overwrite=None):
None.
"""
- subtract_average(names, new_names=None, suffix=None, overwrite=None,
- median=1)
+ logging.info("subtract_median has been moved to the pytplot.tplot_math package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+
+ pytplot.tplot_math.subtract_median(names=names, new_names=new_names, suffix=suffix, overwrite=overwrite)
diff --git a/pyspedas/analysis/tcrossp.py b/pyspedas/analysis/tcrossp.py
index 28dfb445..267e6e1d 100644
--- a/pyspedas/analysis/tcrossp.py
+++ b/pyspedas/analysis/tcrossp.py
@@ -1,6 +1,6 @@
-import numpy as np
-from pytplot import get_data, store_data
+import logging
+import pytplot
def tcrossp(v1, v2, newname=None, return_data=False):
"""
@@ -26,46 +26,6 @@ def tcrossp(v1, v2, newname=None, return_data=False):
--------
Name of the tplot variable
"""
-
- v1_data = None
- v2_data = None
-
- if not isinstance(v1, np.ndarray) and isinstance(v1, str):
- v1_data = get_data(v1)
- v1_name = v1
-
- if v1_data is not None:
- data1 = v1_data[1]
- else:
- v1_name = 'var1'
- data1 = v1
-
-
- if not isinstance(v2, np.ndarray) and isinstance(v2, str):
- v2_data = get_data(v2)
- v2_name = v2
-
- if v2_data is not None:
- data2 = v2_data[1]
- else:
- v2_name = 'var2'
- data2 = v2
-
- if newname is None:
- newname = v1_name + '_cross_' + v2_name
-
- cp = np.cross(data1, data2)
-
- if return_data:
- return cp
- else:
- out = cp
- if v2_data is None:
- if len(cp.shape) == 1:
- out = np.atleast_2d(cp)
- times = np.zeros(out.shape[0])
- else:
- times = v2_data[0]
- store_data(newname, data={'x': times, 'y': out})
- return newname
-
+ logging.info("tcrossp has been moved to the pytplot.tplot_math module. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.tplot_math.tcrossp(v1=v1, v2=v2, newname=newname, return_data=return_data)
\ No newline at end of file
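
The moved function keeps the same interface, including the raw-array path with return_data; a minimal sketch:

    import numpy as np
    import pytplot

    # x-hat cross y-hat = z-hat
    cp = pytplot.tplot_math.tcrossp(np.array([[1., 0., 0.]]),
                                    np.array([[0., 1., 0.]]),
                                    return_data=True)
    print(cp)   # [[0. 0. 1.]]
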
diff --git a/pyspedas/analysis/tdeflag.py b/pyspedas/analysis/tdeflag.py
index e115787a..51e58b54 100644
--- a/pyspedas/analysis/tdeflag.py
+++ b/pyspedas/analysis/tdeflag.py
@@ -7,6 +7,7 @@
Similar to tdeflag.pro in IDL SPEDAS.
"""
+import logging
import pyspedas
import pytplot
import numpy
@@ -41,7 +42,7 @@ def tdeflag(names, method=None, new_names=None, suffix=None,
old_names = pyspedas.tnames(names)
if len(old_names) < 1:
- print('tdeflag error: No pytplot names were provided.')
+ logging.error('tdeflag error: No pytplot names were provided.')
return
if suffix is None:
@@ -72,4 +73,4 @@ def tdeflag(names, method=None, new_names=None, suffix=None,
new_data.append(data[j])
pytplot.store_data(n_names[i], data={'x': new_time, 'y': new_data})
- print('tdeflag was applied to: ' + n_names[i])
+ logging.info('tdeflag was applied to: ' + n_names[i])
diff --git a/pyspedas/analysis/tdotp.py b/pyspedas/analysis/tdotp.py
index 7053218d..a092484d 100644
--- a/pyspedas/analysis/tdotp.py
+++ b/pyspedas/analysis/tdotp.py
@@ -1,5 +1,6 @@
+import logging
+import pytplot
-from pytplot import get_data, store_data
def tdotp(variable1, variable2, newname=None):
"""
@@ -24,25 +25,6 @@ def tdotp(variable1, variable2, newname=None):
--------
Name of the tplot variable
"""
-
- data1 = get_data(variable1, xarray=True)
- data2 = get_data(variable2, xarray=True)
-
- if data1 is None:
- print('Variable not found: ' + variable1)
- return
-
- if data2 is None:
- print('Variable not found: ' + variable2)
- return
-
- if newname is None:
- newname = variable1 + '_dot_' + variable2
-
- # calculate the dot product
- out = data1.dot(data2, dims='v_dim')
-
- # save the output
- saved = store_data(newname, data={'x': data1.time.values, 'y': out.values})
-
- return newname
+ logging.info("tdotp has been moved to the pytplot.tplot_math module. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.tplot_math.tdotp(variable1=variable1, variable2=variable2, newname=newname)
\ No newline at end of file
diff --git a/pyspedas/analysis/tdpwrspc.py b/pyspedas/analysis/tdpwrspc.py
index c4c1b927..0fff8869 100644
--- a/pyspedas/analysis/tdpwrspc.py
+++ b/pyspedas/analysis/tdpwrspc.py
@@ -8,13 +8,16 @@
Similar to tdpwrspc.pro in IDL SPEDAS.
"""
+import logging
+import numpy as np
from .dpwrspc import dpwrspc
from pytplot import get_data, store_data, options, split_vec
+from pyspedas import time_double
-def tdpwrspc(varname, newname=None, nboxpoints=256, nshiftpoints=128,
+def tdpwrspc(varname, newname=None, nboxpoints=None, nshiftpoints=None,
binsize=3, nohanning=False, noline=False, notperhz=False,
- notmvariance=False):
+ trange=None, notmvariance=False):
"""
Compute power spectra for a tplot variable.
@@ -72,16 +75,56 @@ def tdpwrspc(varname, newname=None, nboxpoints=256, nshiftpoints=128,
notmvariance=notmvariance))
return out_vars
else:
- pwrspc = dpwrspc(data_tuple[0], data_tuple[1],
- nboxpoints=nboxpoints,
- nshiftpoints=nshiftpoints,
+ t = data_tuple[0]
+ y = data_tuple[1]
+ if trange is not None:
+ tr = time_double(trange)
+ ok = np.argwhere((t >= tr[0]) & (t < tr[1])).flatten()
+ if len(ok) == 0:
+ logging.error('No data in time range')
+ logging.error(f'{tr}')
+ return
+ t = t[ok]
+ y = y[ok]
+
+ # filter out NaNs
+ ok = np.isfinite(y)
+ if not np.any(ok):
+ logging.error('No finite data in time range')
+ return
+ t = t[ok]
+ y = y[ok]
+
+ t00 = data_tuple[0][0]
+ t = t - t00
+
+ # Choose defaults based on the number of points: nboxpoints defaults to
+ # the largest power of two <= npoints/32 (floored at 8), and nshiftpoints
+ # to half of nboxpoints
+ if nboxpoints is None:
+ nbp = np.max([2**(np.floor(np.log(len(t)) / np.log(2)) - 5), 8])
+ else:
+ nbp = nboxpoints
+
+ if nshiftpoints is None:
+ nsp = nbp/2.0
+ else:
+ nsp = nshiftpoints
+
+ if len(t) <= nbp:
+ logging.error('Not enough data in time range')
+ return
+
+ pwrspc = dpwrspc(t, y,
+ nboxpoints=nbp,
+ nshiftpoints=nsp,
binsize=binsize,
nohanning=nohanning,
noline=noline, notperhz=notperhz,
notmvariance=notmvariance)
if pwrspc is not None:
- store_data(newname, data={'x': pwrspc[0],
+ store_data(newname, data={'x': pwrspc[0] + t00,
'y': pwrspc[2],
'v': pwrspc[1]})
options(newname, 'spec', True)
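
A sketch of the automatic window sizing introduced above: nboxpoints defaults to a power of two near 1/32 of the sample count, floored at 8, with nshiftpoints at half of that.

    import numpy as np

    n_points = 10000                       # illustrative sample count
    nbp = np.max([2**(np.floor(np.log(n_points) / np.log(2)) - 5), 8])
    nsp = nbp / 2.0
    print(nbp, nsp)                        # 256.0 128.0
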
diff --git a/pyspedas/analysis/tests/test_twavpol.py b/pyspedas/analysis/tests/test_twavpol.py
new file mode 100644
index 00000000..b8752c7d
--- /dev/null
+++ b/pyspedas/analysis/tests/test_twavpol.py
@@ -0,0 +1,112 @@
+"""Tests of twavpol functions."""
+import pytplot.get_data
+from pytplot.importers.cdf_to_tplot import cdf_to_tplot
+import unittest
+import pytplot
+from numpy.testing import assert_array_almost_equal_nulp, assert_array_max_ulp, assert_allclose
+import numpy as np
+
+from pytplot import data_exists
+
+from pyspedas.analysis.twavpol import twavpol
+
+class TwavpolDataValidation(unittest.TestCase):
+ """ Compares cotrans results between Python and IDL """
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ IDL Data has to be downloaded to perform these tests
+ The SPEDAS script that creates the file: projects/themis/state/cotrans/thm_cotrans_validate.pro
+ """
+ from pyspedas.utilities.download import download
+ from pyspedas.themis.config import CONFIG
+
+ # Testing tolerance
+ cls.tol = 1e-10
+
+ # Download tplot files
+ remote_server = 'https://spedas.org/'
+ # remote_name = 'testfiles/thm_cotrans_validate.cdf'
+ remote_name = 'testfiles/thc_twavpol_validate.tplot'
+ datafile = download(remote_file=remote_name,
+ remote_path=remote_server,
+ local_path=CONFIG['local_data_dir'],
+ no_download=False)
+ if not datafile:
+ # Skip tests
+ raise unittest.SkipTest("Cannot download data validation file")
+
+ # Load validation variables from the test file
+ pytplot.del_data('*')
+ filename = datafile[0]
+ # pytplot.cdf_to_tplot(filename)
+ pytplot.tplot_restore(filename)
+ pytplot.tplot_names()
+ #pytplot.tplot('thc_scf_fac')
+ #pytplot.tplot('thc_scf_fac_powspec')
+ #pytplot.tplot('thc_scf_fac')
+ cls.thc_scf_fac = pytplot.get_data('thc_scf_fac')
+ cls.thc_scf_fac_attr = pytplot.get_data('thc_scf_fac',metadata=True)
+ cls.thc_scf_fac_powspec = pytplot.get_data('thc_scf_fac_powspec')
+ cls.thc_scf_fac_powspec_attr = pytplot.get_data('thc_scf_fac_powspec',metadata=True)
+ cls.thc_scf_fac_degpol = pytplot.get_data('thc_scf_fac_degpol')
+ cls.thc_scf_fac_waveangle = pytplot.get_data('thc_scf_fac_waveangle')
+ cls.thc_scf_fac_elliptict = pytplot.get_data('thc_scf_fac_elliptict')
+ cls.thc_scf_fac_helict = pytplot.get_data('thc_scf_fac_helict')
+
+ twavpol('thc_scf_fac')
+
+
+ def setUp(self):
+ """ We need to clean tplot variables before each run"""
+ # pytplot.del_data('*')
+
+ def test_powspec(self):
+ """ Validate twavpol power spectrum output """
+
+ py_powspec = pytplot.get_data('thc_scf_fac_powspec')
+ #print(np.nanmin(py_powspec.y),np.nanmax(py_powspec.y))
+ assert_allclose(py_powspec.times,self.thc_scf_fac_powspec.times,atol=1.0e-06)
+ assert_allclose(py_powspec.y, self.thc_scf_fac_powspec.y, atol=1.0e-06,rtol=1.0e-06)
+ #pytplot.tplot('thc_scf_fac_powspec')
+
+ def test_degpol(self):
+ """ Validate twavpol degpol output """
+
+ py_degpol = pytplot.get_data('thc_scf_fac_degpol')
+ #print(np.min(py_degpol.y),np.nanmax(py_degpol.y))
+ assert_allclose(py_degpol.times,self.thc_scf_fac_degpol.times,atol=1.0e-06)
+ assert_allclose(py_degpol.y, self.thc_scf_fac_degpol.y, atol=1.0e-06,rtol=1.0e-06)
+ #pytplot.tplot('thc_scf_fac_degpol')
+
+ def test_waveangle(self):
+ """ Validate twavpol waveangle output """
+
+ py_waveangle = pytplot.get_data('thc_scf_fac_waveangle')
+ #print(np.nanmin(py_waveangle.y),np.nanmax(py_waveangle.y))
+ assert_allclose(py_waveangle.times,self.thc_scf_fac_waveangle.times,atol=1.0e-05)
+ assert_allclose(py_waveangle.y, self.thc_scf_fac_waveangle.y, atol=1.0e-05,rtol=1.0e-06)
+ #pytplot.tplot('thc_scf_fac_waveangle')
+
+ def test_elliptict(self):
+ """ Validate twavpol elliptict output """
+
+ py_elliptict = pytplot.get_data('thc_scf_fac_elliptict')
+ #print(np.nanmin(py_elliptict.y),np.nanmax(py_elliptict.y))
+ assert_allclose(py_elliptict.times,self.thc_scf_fac_elliptict.times,atol=1.0e-06)
+ assert_allclose(py_elliptict.y, self.thc_scf_fac_elliptict.y, atol=1.0e-06,rtol=1.0e-06)
+ #pytplot.tplot('thc_scf_fac_elliptict')
+
+ def test_helict(self):
+ """ Validate twavpol helict output """
+
+ py_helict = pytplot.get_data('thc_scf_fac_helict')
+ #print(np.nanmin(py_helict.y),np.nanmax(py_helict.y))
+ assert_allclose(py_helict.times,self.thc_scf_fac_helict.times,atol=1.0e-06)
+ assert_allclose(py_helict.y, self.thc_scf_fac_helict.y, atol=1.0e-06,rtol=1.0e-06)
+ #pytplot.tplot('thc_scf_fac_helict')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/analysis/tests/tests.py b/pyspedas/analysis/tests/tests.py
index 119bcd36..cff7c348 100644
--- a/pyspedas/analysis/tests/tests.py
+++ b/pyspedas/analysis/tests/tests.py
@@ -1,13 +1,13 @@
"""Automated tests for the analysis functions."""
import unittest
-from pyspedas.analysis.tsmooth import smooth
+from pytplot import smooth
from pyspedas import (subtract_average, subtract_median, tsmooth, avg_data,
yclip, time_clip, deriv_data, tdeflag, clean_spikes,
tinterpol)
-from pyspedas.analysis.tcrossp import tcrossp
-from pyspedas.analysis.tdotp import tdotp
-from pyspedas.analysis.tnormalize import tnormalize
+from pytplot import tcrossp
+from pytplot import tdotp
+from pytplot import tnormalize
from pytplot import get_data, store_data, replace_data
import numpy as np
@@ -24,12 +24,15 @@ def setUp(self):
class AnalysisTestCases(BaseTestCase):
"""Test functions under analysis folder."""
+
def test_tdotp(self):
store_data('var1', data={'x': [0], 'y': [[3, -3, 1]]})
store_data('var2', data={'x': [0], 'y': [[4, 9, 2]]})
dp = tdotp('var1', 'var2')
dpdata = get_data('var1_dot_var2')
self.assertTrue(dpdata.y == np.array([-13]))
+ dp = tdotp('var1_doesnt_exist', 'var2')
+ dp = tdotp('var1', 'var2_doesnt_exist')
def test_tcrossp(self):
""" cross product tests"""
@@ -69,11 +72,11 @@ def test_subtract_median(self):
subtract_median('aaabbbcc')
subtract_median('test1', new_names='aabb')
d = get_data('aabb')
+ self.assertTrue(len(d[1]) == 6)
subtract_median(['test', 'aabb'], new_names='aaabbb')
subtract_median('test1', overwrite=1)
subtract_average('test', new_names="testtest")
subtract_average(['test-m', 'test'], new_names="testtest2")
- self.assertTrue(len(d[1]) == 6)
def test_subtract_average(self):
"""Test subtract_average."""
@@ -137,23 +140,25 @@ def test_avg_data(self):
d = get_data('test-avg')
self.assertTrue((d[1] == [4.0, 11.5, 10.5]).all())
avg_data('test', width=2, overwrite=True) # Test overwrite
- avg_data('test', dt=4.0, noremainder=False) # Test dt option
store_data('test', data={'x': [1., 2., 3., 4., 5., 6.],
'y': [3., 5., 8., -4., 20., 1.]})
avg_data('test', width=2, new_names='aabb') # Test new_names
d = get_data('aabb')
# Test multiple names
avg_data(['test', 'aabb'], new_names='aaabbb', width=2)
+ dt = [1., 12., 13., 14., 15., 16.]
dn = [[3., 5., 8.], [15., 20., 1.], [3., 5., 8.], [15., 20., 1.],
[23., 15., 28.], [15., 20., 1.]]
- store_data('test1', data={'x': [1., 12., 13., 14., 15., 16.], 'y': dn})
+ dv = dn
+ store_data('test1', data={'x': dt, 'y': dn, 'v': dv})
avg_data('test1', width=2) # Test 3-d data
- avg_data('test1', new_names='test2', dt=2.) # Test a reasonable dt
- avg_data('test1', dt=-1.) # Test dt error
- avg_data('test1', dt=1.e8) # Test dt error
+ avg_data('test1', new_names='test2', res=2.) # Test a reasonable resolution
+ avg_data('test1', res=-1.) # Test res error
+ avg_data('test1', res=1.e8) # Test res error
d2 = get_data('test2')
- self.assertTrue(len(d) > 0)
+ self.assertTrue(len(d2) > 0)
self.assertTrue(d2[1][-1][0] == 19.0)
+ self.assertTrue(len(d2[2]) == len(d2[0]))
def test_clean_spikes(self):
"""Test clean_spikes."""
@@ -221,8 +226,17 @@ def test_tinterpol(self):
tinterpol('aaabbbccc', 'test') # Test non-existent name
tn = [1., 1.5, 4.6, 5.8, 6.]
dn = [10., 15., 46., 58., 60.]
+ data = np.array([[0, 1, 2, 3, 4],
+ [5, 6, 7, 8, 9],
+ [10, 11, 12, 13, 14],
+ [15, 16, 17, 18, 19],
+ [20, 21, 22, 23, 24]])
store_data('test1', data={'x': tn, 'y': dn})
+ store_data('test2', data={'x': tn, 'y': data, 'v': [10, 20, 30, 40, 50]})
tinterpol('test1', 'test')
+ tinterpol('test1', 'doesnt_exist')
+ tinterpol('test2', 'test', newname='')
+ tinterpol('test2', [1, 2, 3, 4, 5, 6])
d = get_data('test1-itrp')
self.assertTrue(d[1][1] == 20.)
diff --git a/pyspedas/analysis/time_clip.py b/pyspedas/analysis/time_clip.py
index 22e2d146..ba55164f 100644
--- a/pyspedas/analysis/time_clip.py
+++ b/pyspedas/analysis/time_clip.py
@@ -6,8 +6,7 @@
Similar to tclip.pro in IDL SPEDAS.
"""
-
-import pyspedas
+import logging
import pytplot
@@ -37,151 +36,7 @@ def time_clip(names, time_start, time_end, new_names=None, suffix=None,
None.
"""
- old_names = pyspedas.tnames(names)
-
- if len(old_names) < 1:
- print('Time clip error: No pytplot names were provided.')
- return
-
- if suffix is None:
- suffix = '-tclip'
-
- if overwrite is not None:
- n_names = old_names
- elif new_names is None:
- n_names = [s + suffix for s in old_names]
- else:
- n_names = new_names
-
- if isinstance(n_names, str):
- n_names = [n_names]
-
- if len(n_names) != len(old_names):
- n_names = [s + suffix for s in old_names]
-
- for j in range(len(old_names)):
- if old_names[j] != n_names[j]:
- pyspedas.tcopy(old_names[j], n_names[j])
-
- alldata = pytplot.get_data(n_names[j])
- metadata = pytplot.get_data(n_names[j], metadata=True)
-
- if not isinstance(alldata, tuple): # NRV variable
- continue
-
- time = alldata[0]
- data = alldata[1]
-
- index_start = 0
- index_end = len(time)
-
- if index_end < 1:
- print('Time clip found empty list.')
- continue
-
- new_time = pyspedas.time_float(time)
- new_time_start = pyspedas.time_float(time_start)
- new_time_end = pyspedas.time_float(time_end)
-
- if new_time_start > new_time_end:
- print('Error: Start time is larger than end time.')
- continue
-
- if (new_time_start > new_time[-1]) or (new_time_end < new_time[0]):
- print('Time clip returns empty data.')
- continue
-
- if (new_time_start <= new_time[0]) and (new_time_end >= new_time[-1]):
- print('Time clip returns full data set.')
- continue
-
- for i in range(index_end):
- if new_time[i] >= new_time_start:
- index_start = i
- break
- found_end = index_end
- for i in range(index_start, index_end):
- if new_time[i] > new_time_end:
- found_end = i
- break
- index_end = found_end
-
- tmp_q = pytplot.data_quants[n_names[j]]
-
- if 'v1' in tmp_q.coords.keys():
- if len(tmp_q.coords['v1'].values.shape) == 2:
- v1_data = tmp_q.coords['v1'].values[index_start:index_end, :]
- else:
- v1_data = tmp_q.coords['v1'].values
-
- if 'v2' in tmp_q.coords.keys():
- if len(tmp_q.coords['v2'].values.shape) == 2:
- v2_data = tmp_q.coords['v2'].values[index_start:index_end, :]
- else:
- v2_data = tmp_q.coords['v2'].values
-
- if 'v3' in tmp_q.coords.keys():
- if len(tmp_q.coords['v3'].values.shape) == 2:
- v3_data = tmp_q.coords['v3'].values[index_start:index_end, :]
- else:
- v3_data = tmp_q.coords['v3'].values
-
- if 'v' in tmp_q.coords.keys():
- if len(tmp_q.coords['v'].values.shape) == 2:
- v_data = tmp_q.coords['v'].values[index_start:index_end, :]
- else:
- v_data = tmp_q.coords['v'].values
-
- if 'spec_bins' in tmp_q.coords.keys():
- if len(tmp_q.coords['spec_bins'].values.shape) == 2:
- v_data = tmp_q.coords['spec_bins']\
- .values[index_start:index_end, :]
- else:
- v_data = tmp_q.coords['spec_bins'].values
-
- try:
- if 'v1' in tmp_q.coords.keys() and\
- 'v2' in tmp_q.coords.keys() and\
- 'v3' in tmp_q.coords.keys():
- pytplot.store_data(n_names[j], data={
- 'x': time[index_start:index_end],
- 'y': data[index_start:index_end, :, :, :],
- 'v1': v1_data, 'v2': v2_data, 'v3': v3_data},
- attr_dict=metadata)
- elif 'v1' in tmp_q.coords.keys() and\
- 'v2' in tmp_q.coords.keys():
- pytplot.store_data(n_names[j], data={
- 'x': time[index_start:index_end],
- 'y': data[index_start:index_end, :, :],
- 'v1': v1_data, 'v2': v2_data},
- attr_dict=metadata)
- elif 'v1' in tmp_q.coords.keys():
- pytplot.store_data(n_names[j], data={
- 'x': time[index_start:index_end],
- 'y': data[index_start:index_end, :],
- 'v1': v1_data}, attr_dict=metadata)
- elif 'spec_bins' in tmp_q.coords.keys():
- pytplot.store_data(n_names[j], data={
- 'x': time[index_start:index_end],
- 'y': data[index_start:index_end, :],
- 'v': v_data}, attr_dict=metadata)
- elif 'v' in tmp_q.coords.keys():
- pytplot.store_data(n_names[j], data={
- 'x': time[index_start:index_end],
- 'y': data[index_start:index_end, :],
- 'v': v_data}, attr_dict=metadata)
- elif data.ndim == 1:
- pytplot.store_data(n_names[j], data={
- 'x': time[index_start:index_end],
- 'y': data[index_start:index_end]},
- attr_dict=metadata)
- else:
- pytplot.store_data(n_names[j], data={
- 'x': time[index_start:index_end],
- 'y': data[index_start:index_end]},
- attr_dict=metadata)
- except:
- print('Problem time clipping: ' + n_names[j])
- continue
-
- print('Time clip was applied to: ' + n_names[j])
+ logging.info("time_clip has been moved to the pytplot.tplot_math module. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ pytplot.tplot_math.time_clip(names=names, time_start=time_start, time_end=time_end, new_names=new_names, suffix=suffix,
+ overwrite=overwrite)
\ No newline at end of file
diff --git a/pyspedas/analysis/tinterpol.py b/pyspedas/analysis/tinterpol.py
index b8a952bc..03426eab 100644
--- a/pyspedas/analysis/tinterpol.py
+++ b/pyspedas/analysis/tinterpol.py
@@ -7,9 +7,12 @@
Similar to tinterpol.pro in IDL SPEDAS.
"""
-
+import datetime
+import logging
from pyspedas import tnames
-from pytplot import get_data, store_data
+from pytplot import get_data, store
+import numpy as np
+
def tinterpol(names, interp_to, method=None, newname=None, suffix=None):
"""
@@ -50,7 +53,7 @@ def tinterpol(names, interp_to, method=None, newname=None, suffix=None):
old_names = tnames(names)
if len(old_names) < 1:
- print('tinterpol error: No pytplot names were provided.')
+ logging.error('tinterpol error: No pytplot names were provided.')
return
if suffix is None:
@@ -61,16 +64,14 @@ def tinterpol(names, interp_to, method=None, newname=None, suffix=None):
if (newname is None) or (len(newname) == 1 and newname[0] is None):
n_names = [s + suffix for s in old_names]
- elif newname == '':
- n_names = old_names
else:
n_names = newname
if isinstance(interp_to, str):
- interp_to_data = get_data(interp_to)
+ interp_to_data = get_data(interp_to, dt=True)
if interp_to_data is None:
- print('Error, tplot variable: ' + interp_to + ' not found.')
+ logging.error('Error, tplot variable: ' + interp_to + ' not found.')
return
interp_to_times = interp_to_data[0]
@@ -79,19 +80,23 @@ def tinterpol(names, interp_to, method=None, newname=None, suffix=None):
for name_idx, name in enumerate(old_names):
xdata = get_data(name, xarray=True)
+ metadata = get_data(name, metadata=True)
+
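+        # Note (added for clarity): the xarray time coordinate is datetime64, so
+        # unix-time floats must be converted before interp() can align the target times.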
+        if not isinstance(interp_to_times[0], (datetime.datetime, np.datetime64)):
+            interp_to_times = [datetime.datetime.utcfromtimestamp(time) for time in interp_to_times]
xdata_interpolated = xdata.interp({'time': interp_to_times},
method=method)
if 'spec_bins' in xdata.coords:
- store_data(n_names[name_idx],
- data={
- 'x': interp_to_times,
- 'y': xdata_interpolated.values,
- 'v': xdata_interpolated.coords['spec_bins'].values
- })
+ store(n_names[name_idx],
+ data={
+ 'x': interp_to_times,
+ 'y': xdata_interpolated.values,
+ 'v': xdata_interpolated.coords['spec_bins'].values
+ },
+ metadata=metadata)
else:
- store_data(n_names[name_idx], data={'x': interp_to_times,
- 'y': xdata_interpolated.values})
+ store(n_names[name_idx], data={'x': interp_to_times,
+ 'y': xdata_interpolated.values}, metadata=metadata)
- print('tinterpol (' + method + ') was applied to: '
- + n_names[name_idx])
+ logging.info('tinterpol (' + method + ') was applied to: ' + n_names[name_idx])
diff --git a/pyspedas/analysis/tnormalize.py b/pyspedas/analysis/tnormalize.py
index 8e05c076..de9474fd 100644
--- a/pyspedas/analysis/tnormalize.py
+++ b/pyspedas/analysis/tnormalize.py
@@ -1,6 +1,5 @@
-
-import numpy as np
-from pytplot import get_data, store_data
+import logging
+import pytplot
def tnormalize(variable, newname=None, return_data=False):
"""
@@ -26,29 +25,6 @@ def tnormalize(variable, newname=None, return_data=False):
is set
"""
- metadata_in = {}
- if isinstance(variable, str):
- data_in = get_data(variable)
- metadata_in = get_data(variable, metadata=True)
- data = data_in[1]
- times = data_in[0]
- else:
- data = np.atleast_2d(variable)
- times = np.zeros(data.shape[0])
-
- n = np.sqrt(np.nansum(data**2, axis=1))
-
- # to do element-wise division, the magnitude needs to be repeated for each component
- norm_reshaped = np.reshape(n, [len(times), 1])
- norm_mag = np.repeat(norm_reshaped, len(data[0, :]), axis=1)
-
- data_norm = data/norm_mag
-
- if return_data:
- return data_norm
- else:
- if newname is None:
- newname = variable + '_normalized'
- store_data(newname, data={'x': times, 'y': data_norm}, attr_dict=metadata_in)
- return newname
-
+ logging.info("tnormalize has been moved to the pytplot.tplot_math module. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+    return pytplot.tplot_math.tnormalize(variable=variable, newname=newname, return_data=return_data)
\ No newline at end of file
diff --git a/pyspedas/analysis/tsmooth.py b/pyspedas/analysis/tsmooth.py
index 6a65fe7d..50bbf6ed 100644
--- a/pyspedas/analysis/tsmooth.py
+++ b/pyspedas/analysis/tsmooth.py
@@ -9,9 +9,7 @@
Also, see: https://www.harrisgeospatial.com/docs/SMOOTH.html
"""
-import math
-import numpy as np
-import pyspedas
+import logging
import pytplot
@@ -34,28 +32,9 @@ def smooth(data, width=10, preserve_nans=None):
Smoothed data.
"""
- result = data.copy()
- N = len(data)
-
- if N <= width:
- print("smooth: Not enough points.")
- return result
-
- for i, d in enumerate(data):
- if (i >= (width-1)/2) and (i <= N-(width+1)/2):
- if (preserve_nans is not None) and data[i] is np.NaN:
- continue
- tsum = 0
- count = 0
- for j in range(int(width)):
- idx = math.ceil(i+j-width/2)
- if data[idx] is not np.NaN:
- tsum += data[idx]
- count += 1
- if count > 0: # otherwise, all NaN
- result[i] = (1/width) * tsum
- return result
-
+ logging.info("smooth has been moved to the pytplot.tplot_math module. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+    return pytplot.tplot_math.smooth(data=data, width=width, preserve_nans=preserve_nans)
def tsmooth(names, width=10, median=None, preserve_nans=None,
new_names=None, suffix=None, overwrite=None):
@@ -85,44 +64,7 @@ def tsmooth(names, width=10, median=None, preserve_nans=None,
None.
"""
- old_names = pyspedas.tnames(names)
-
- if len(old_names) < 1:
- print('tsmooth error: No pytplot names were provided.')
- return
-
- if suffix is None:
- suffix = '-s'
-
- if overwrite is not None:
- n_names = old_names
- elif new_names is None:
- n_names = [s + suffix for s in old_names]
- else:
- n_names = new_names
-
- if isinstance(n_names, str):
- n_names = [n_names]
-
- if len(n_names) != len(old_names):
- n_names = [s + suffix for s in old_names]
-
- for i, old in enumerate(old_names):
- new = n_names[i]
-
- if new != old:
- pyspedas.tcopy(old, new)
-
- data = pytplot.data_quants[new].values
-
- dim = data.shape
- if len(dim) == 1:
- data = smooth(data, width=width, preserve_nans=preserve_nans)
- else:
- for k in range(dim[1]):
- data[:, k] = smooth(data[:, k], width=width,
- preserve_nans=preserve_nans)
-
- pytplot.data_quants[new].values = data
-
- print('tsmooth was applied to: ' + new)
+ logging.info("tsmooth has been moved to the pytplot.tplot_math module. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+    pytplot.tplot_math.tsmooth(names=names, width=width, median=median, preserve_nans=preserve_nans,
+                               new_names=new_names, suffix=suffix, overwrite=overwrite)
diff --git a/pyspedas/analysis/twavpol.py b/pyspedas/analysis/twavpol.py
index ea98bb5a..d757d4b1 100644
--- a/pyspedas/analysis/twavpol.py
+++ b/pyspedas/analysis/twavpol.py
@@ -68,19 +68,25 @@
Care should be taken in evaluating degree of polarisation results.
For meaningful results there should be significant wave power at the
frequency where the polarisation approaches
- 100%. Remembercomparing two straight lines yields 100% polarisation.
+ 100%. Remember, comparing two straight lines yields 100% polarisation.
"""
-
+import logging
import warnings
import numpy as np
+# use nansum from bottleneck if it's installed, otherwise use the numpy one
+try:
+ import bottleneck as bn
+ nansum = bn.nansum
+except ImportError:
+ nansum = np.nansum
from pytplot import get_data, store_data, options
from pyspedas import tnames
# these routines require numpy v1.20.0 or later
if np.__version__ < '1.20':
- print('Error: numpy 1.20.0 or later is required for wave polarization calculations. ')
- print('Please update numpy with: pip install numpy --upgrade')
+ logging.error('Error: numpy 1.20.0 or later is required for wave polarization calculations. ')
+ logging.error('Please update numpy with: pip install numpy --upgrade')
breakpoint()
@@ -97,6 +103,7 @@ def wpol_ematspec(i1, i2, i3, i4, aa, nosmbins, matspec):
"""Calculate ematspec array."""
id0 = (i2 - int((nosmbins-1)/2))
id1 = (i2 + int((nosmbins-1)/2)) + 1
+    # Using nansum() rather than sum() here would result in a mismatch between IDL
+    # and Python results, so plain np.sum() is kept.
res = np.sum(aa[0:nosmbins] * matspec[i1, id0:id1, i3, i4])
return res
@@ -183,10 +190,10 @@ def wpol_helicity(nosteps, nopfft, KK, ematspec, waveangle):
for k in range(int(nopfft/2)):
for k1 in range(3):
- upper = np.sum(2*np.real(lambdau[KK, k, k1, 0:3]) *
+ upper = nansum(2*np.real(lambdau[KK, k, k1, 0:3]) *
np.imag(lambdau[KK, k, k1, 0:3]))
la2 = np.imag(lambdau[KK, k, k1, 0:3])**2
- lower = np.sum(np.real(lambdau[KK, k, k1, 0:3])**2 - la2)
+ lower = nansum(np.real(lambdau[KK, k, k1, 0:3])**2 - la2)
gammay = np.nan
if np.isfinite(upper) and np.isfinite(lower):
if upper > 0.0:
@@ -196,6 +203,7 @@ def wpol_helicity(nosteps, nopfft, KK, ematspec, waveangle):
lambday[KK, k, k1, :] = (np.exp((0.0 - 1j*0.5*gammay)) *
lambdau[KK, k, k1, :])
lay2 = np.imag(lambday[KK, k, k1, 0:3])**2
+            # Using nansum() rather than sum() in the helicity calculation would result in a mismatch between IDL and Python results.
helicity[KK, k, k1] = (1 /
(np.sqrt(np.real(lambday[KK, k, k1, 0])**2 +
np.real(lambday[KK, k, k1, 1])**2 +
@@ -247,11 +255,11 @@ def wavpol(ct, bx, by, bz,
----------
ct : list of float
Time.
- b1 : list of float
+ bx : list of float
Bx field.
- b2 : list of float
+ by : list of float
By field.
- b3 : list of float
+ bz : list of float
Bz field.
nopfft : int, optional
Number of points in FFT. The default is 256.
@@ -319,10 +327,9 @@ def wavpol(ct, bx, by, bz,
endsampfreq = 1./(ct[nopoints-1]-ct[nopoints-2])
if beginsampfreq != endsampfreq:
- print('wavpol Warning: file sampling frequency changes from ',
- beginsampfreq, 'Hz to ', endsampfreq, 'Hz')
+        logging.warning('wavpol Warning: file sampling frequency changes from ' + str(beginsampfreq) + ' Hz to ' + str(endsampfreq) + ' Hz')
else:
- print('wavpol: File sampling frequency=', beginsampfreq, 'Hz')
+        logging.info('wavpol: File sampling frequency = ' + str(beginsampfreq) + ' Hz')
samp_freq = beginsampfreq
samp_per = 1./samp_freq
@@ -352,8 +359,8 @@ def wavpol(ct, bx, by, bz,
# If there are too many batches, return.
if n_batches > 80000.0:
- print("wavpol error: Large number of batches. " +
- "Returning to avoid memory runaway.")
+ logging.error("wavpol error: Large number of batches. " +
+ "Returning to avoid memory runaway.")
err_flag = 1
result = (timeline, freqline, powspec, degpol, waveangle,
elliptict, helict, pspec3, err_flag)
@@ -363,19 +370,28 @@ def wavpol(ct, bx, by, bz,
# Total numbers of FFT calculations including 1 leap frog for each batch
ind_batch0 = 0
nosteps = 0
- print('n_batches', n_batches)
+ logging.info('n_batches: ' + str(n_batches))
for i in range(n_batches):
nosteps = int(nosteps + np.floor((errs[i] - ind_batch0)/steplength))
ind_batch0 = errs[i]
nosteps = nosteps + n_batches
- print('Total number of steps:', nosteps)
+    logging.info('Total number of steps: ' + str(nosteps))
# leveltplot = 0.000001 # Power rejection level 0 to 1
nosmbins = bin_freq # No. of bins in frequency domain
# Smoothing profile based on Hanning:
- aa = np.array([0.024, 0.093, 0.232, 0.301, 0.232, 0.093, 0.024])
+ # aa = np.array([0.024, 0.093, 0.232, 0.301, 0.232, 0.093, 0.024])
+ # The predefined smoothing array aa is incorrect unless bin_freq = 7. For smaller
+ # values of bin_freq, only the first few entries will be used, and the values will be
+ # asymmetric and unnormalized. For larger values, out-of-bound array indices can occur.
+ # Now we generate a properly sized Hamming array (not Hann/hanning!). The code below
+ # will match the old predefined values for bin_freq = 7. JWL 2023-02-02
+
+ w = np.hamming(bin_freq)
+ tot = np.sum(w)
+ aa = w/tot
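+    # Sanity check (added): np.round(np.hamming(7)/np.hamming(7).sum(), 3) reproduces
+    # the old array [0.024, 0.093, 0.232, 0.301, 0.232, 0.093, 0.024].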
ind0 = 0
KK = 0
@@ -426,8 +442,7 @@ def wavpol(ct, bx, by, bz,
ngood = np.count_nonzero(~np.isnan(xs)) # Count finite data.
if ngood > nopfft:
nbp_fft_batches[batch] = np.floor(ngood/steplength)
- print('Total number of possible FFT in the batch no ', batch,
- ' is:', nbp_fft_batches[batch])
+            logging.info('Total number of possible FFT in batch no ' + str(batch) + ' is: ' + str(nbp_fft_batches[batch]))
ind0_fft = 0
for j in range(int(nbp_fft_batches[batch])):
# ind1_fft = nopfft * (j+1)-1
@@ -516,10 +531,16 @@ def wavpol(ct, bx, by, bz,
# Calculation of the degree of polarization.
# Calculation of square of smoothed spec matrix.
- for k1 in range(3):
- for k2 in range(3):
- matsqrd[KK, :, k1, k2] = wpol_matsqrd(KK, k1, k2,
- ematspec)
+ # for k1 in range(3):
+ # for k2 in range(3):
+ # matsqrd[KK, :, k1, k2] = wpol_matsqrd(KK, k1, k2,
+ # ematspec)
+
+ # A user suggested using the @ operator (shorthand for np.matmul()) to square the ematspec array,
+ # since it already deals with any leading dimensions. (Note that only the KKth and lower slices are
+ # initialized at this point.) -- JWL 2023-04-20
+
+ matsqrd[KK] = ematspec[KK] @ ematspec[KK]
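+            # ematspec[KK] stacks a 3x3 spectral matrix per frequency bin, so the @
+            # operator performs a batched matrix product over the leading (frequency) dimension.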
trmatsqrd[KK, :] = np.real(matsqrd[KK, :, 0, 0] +
matsqrd[KK, :, 1, 1] +
@@ -574,10 +595,7 @@ def wavpol(ct, bx, by, bz,
# Print an indication that a computation is happening.
if KK == 0 or KK % 40 == 0:
- print(' ')
- print('wavpol step', KK, ' ', end='')
- elif KK % 4 == 0:
- print('.', end='')
+                    logging.info('wavpol step: ' + str(KK))
KK += 1
# End loop "for j"
@@ -598,9 +616,9 @@ def wavpol(ct, bx, by, bz,
# End "if ngood > nopfft"
else:
binwidth = samp_freq/nopfft
- print('Fourier Transform is not possible. ',
- 'Ngood = ', ngood,
- 'Required number of points for FFT = ', nopfft)
+ logging.error('Fourier Transform is not possible. ')
+ logging.error('Ngood = ' + str(ngood))
+ logging.error('Required number of points for FFT = ' + str(nopfft))
timeline[KK] = (ct[ind0] +
np.abs(int(nopfft/2))/samp_freq +
@@ -627,7 +645,7 @@ def wavpol(ct, bx, by, bz,
# Returns results.
result = (timeline, freqline, powspec, degpol, waveangle,
elliptict, helict, pspec3, err_flag)
- print('\nwavpol completed successfully')
+    logging.info('wavpol completed successfully')
return result
@@ -664,26 +682,26 @@ def twavpol(tvarname, prefix='', nopfft=-1, steplength=-1, bin_freq=-1):
all_names = tnames(tvarname)
if len(all_names) < 1:
- print('twavpol error: No valid pytplot variables match tvarname.')
+ logging.error('twavpol error: No valid pytplot variables match tvarname.')
return 0
xdata = get_data(tvarname)
ct = xdata.times
if len(ct) < 2:
- print('twavpol error: Time variable does not have enough points.')
+ logging.error('twavpol error: Time variable does not have enough points.')
return 0
bfield = xdata.y
if bfield.ndim != 2:
- print('twavpol error: Data should have 2 dimensions.')
+ logging.error('twavpol error: Data should have 2 dimensions.')
return 0
b1 = bfield[:, 0]
b2 = bfield[:, 1]
b3 = bfield[:, 2]
if (len(ct) != len(b1) or len(ct) != len(b2) or len(ct) != len(b3)):
- print('twavpol error: Number of time elements does not match' +
- 'number of magnetic field elements.')
+        logging.error('twavpol error: Number of time elements does not match ' +
+                      'number of magnetic field elements.')
return 0
# Apply vawpol.
@@ -693,7 +711,7 @@ def twavpol(tvarname, prefix='', nopfft=-1, steplength=-1, bin_freq=-1):
bin_freq=bin_freq)
if err_flag == 1:
- print('twavpol error: There were errors while applying wavpol.')
+ logging.error('twavpol error: There were errors while applying wavpol.')
return 0
# Store new pytplot variables as spectrograms.
diff --git a/pyspedas/analysis/wavelet.py b/pyspedas/analysis/wavelet.py
index 3c05c1e6..1e6e143b 100644
--- a/pyspedas/analysis/wavelet.py
+++ b/pyspedas/analysis/wavelet.py
@@ -10,6 +10,7 @@
http://spedas.org/wiki/index.php?title=Wavelet
"""
+import logging
import numpy as np
import pywt
import pytplot
@@ -49,7 +50,7 @@ def wavelet(names, new_names=None, suffix='_pow', wavename='morl', scales=None,
powervar = []
if len(varnames) < 1:
- print('wavelet error: No pytplot names were provided.')
+ logging.error('wavelet error: No pytplot names were provided.')
return
if scales is None:
@@ -69,7 +70,7 @@ def wavelet(names, new_names=None, suffix='_pow', wavename='morl', scales=None,
data = alldata[1]
if len_time < 2:
- print('wavelet error: Not enought data points for ' + old)
+            logging.error('wavelet error: Not enough data points for ' + old)
continue
coef, freqs = pywt.cwt(data, scales=scales, wavelet=wavename,
@@ -81,6 +82,6 @@ def wavelet(names, new_names=None, suffix='_pow', wavename='morl', scales=None,
pytplot.options(new, 'spec', 1)
powervar.append(new)
- print('wavelet was applied to: ' + new)
+ logging.info('wavelet was applied to: ' + new)
return powervar
diff --git a/pyspedas/analysis/yclip.py b/pyspedas/analysis/yclip.py
index f65bcaf1..006960c5 100644
--- a/pyspedas/analysis/yclip.py
+++ b/pyspedas/analysis/yclip.py
@@ -7,7 +7,7 @@
This function clips y-axis data. To clip time-axis, use time_clip.
"""
-
+import logging
import pyspedas
import pytplot
import numpy as np
@@ -45,7 +45,7 @@ def yclip(names, ymin, ymax, flag=None, new_names=None, suffix=None,
old_names = pyspedas.tnames(names)
if len(old_names) < 1:
- print('yclip error: No pytplot names were provided.')
+ logging.error('yclip error: No pytplot names were provided.')
return
if suffix is None:
@@ -72,4 +72,4 @@ def yclip(names, ymin, ymax, flag=None, new_names=None, suffix=None,
data = pytplot.clip(old, ymin, ymax, new)
- print('yclip was applied to: ' + new)
+ logging.info('yclip was applied to: ' + new)
diff --git a/pyspedas/barrel/README.md b/pyspedas/barrel/README.md
new file mode 100644
index 00000000..d91410da
--- /dev/null
+++ b/pyspedas/barrel/README.md
@@ -0,0 +1,77 @@
+
+## Balloon Array for Radiation belt Relativistic Electron Losses (BARREL)
+The routines in this module can be used to load data from the BARREL mission.
+
+### Data Types
+Each payload provides seven data types:
+- Slow Spectra (sspc) - 256-channel, 32-second accumulation
+- Medium Spectra (mspc) - 48-channel, 4-second accumulation
+- Fast Spectra (fspc) - 4-channel (or 6-channel), 20 Hz data
+- Magnetometer (magn)
+- Ephemeris (ephm)
+- Rate Counters (rcnt)
+- Housekeeping (hkpg)
+
+### Balloon identifiers
+Balloons have a two-character alphanumeric identifier: the first character is a number indicating the campaign, the second is a letter indicating the flight.
+There have been seven BARREL campaigns, each with anywhere between 1 and 20 flights.
+- 1a - 2013-01-28 - 2013-02-14
+- 1b - 2013-01-02 - 2013-01-09
+- 1c - 2013-01-16 - 2013-01-26
+- 1d - 2013-01-04 - 2013-01-21
+- 1g - 2013-01-14 - 2013-01-27
+- 1h - 2013-01-19 - 2013-02-11
+- 1i - 2013-01-09 - 2013-02-16
+- 1j - 2013-01-01 - 2013-01-14
+- 1k - 2013-01-05 - 2013-01-21
+- 1m - 2013-01-06 - 2013-01-12
+- 1n - 2013-01-08 - 2013-01-16
+- 1o - 2013-01-09 - 2013-01-17
+- 1q - 2013-01-20 - 2013-02-06
+- 1r - 2013-01-21 - 2013-01-23
+- 1s - 2013-01-22 - 2013-01-27
+- 1t - 2013-01-24 - 2013-02-15
+- 1u - 2013-01-26 - 2013-02-09
+- 1v - 2013-01-30 - 2013-02-06
+- 2a - 2014-01-16 - 2014-02-04
+- 2b - 2014-01-17 - 2014-02-07
+- 2c - 2014-01-20 - 2014-01-25
+- 2d - 2014-01-01 - 2014-01-01
+- 2e - 2014-01-27 - 2014-01-29
+- 2f - 2014-02-01 - 2014-02-03
+- 2i - 2013-12-31 - 2014-01-14
+- 2k - 2014-01-04 - 2014-01-12
+- 2l - 2014-01-06 - 2014-02-03
+- 2m - 2014-01-11 - 2014-01-13
+- 2n - 2014-01-18 - 2014-01-19
+- 2o - 2014-01-26 - 2014-02-09
+- 2p - 2014-01-30 - 2014-02-11
+- 2q - 2014-02-02 - 2014-02-04
+- 2t - 2013-01-27 - 2014-01-17
+- 2w - 2013-12-31 - 2014-01-11
+- 2x - 2014-01-04 - 2014-01-22
+- 2y - 2014-01-11 - 2014-01-19
+- 3a - 2015-08-10 - 2015-08-10
+- 3b - 2015-08-13 - 2015-08-13
+- 3c - 2015-08-17 - 2015-08-18
+- 3d - 2015-08-19 - 2015-08-19
+- 3e - 2015-08-21 - 2015-08-22
+- 3f - 2015-08-25 - 2015-08-26
+- 3g - 2015-08-25 - 2015-08-26
+- 4a - 2016-08-13 - 2016-08-14
+- 4b - 2016-08-16 - 2016-08-17
+- 4c - 2016-08-21 - 2016-08-22
+- 4d - 2016-08-21 - 2016-08-22
+- 4e - 2016-08-24 - 2016-08-25
+- 4f - 2016-08-28 - 2016-08-29
+- 4g - 2016-08-29 - 2016-08-30
+- 4h - 2016-08-30 - 2016-08-31
+- 5a - 2018-06-25 - 2018-06-26
+- 6a - 2018-02-09 - 2019-02-21
+- 7a - 2019-12-29 - 2020-04-01
+
+### Examples
+Data can be loaded using the wrapper functions for each data type:
+`pyspedas.barrel.sspc(probe='1A', trange=['2013-01-28', '2013-02-14'])`
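+
+As a sketch, every wrapper shares the `sspc` signature above (the tplot variable names returned depend on the CDF contents):
+
+```python
+import pyspedas
+
+# load magnetometer data for payload 1A over its full flight
+magn_vars = pyspedas.barrel.magn(probe='1A', trange=['2013-01-28', '2013-02-14'])
+
+# or download the files without loading them into tplot variables
+files = pyspedas.barrel.magn(probe='1A', downloadonly=True)
+```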
+
+Full example notebooks are available at https://github.com/spedas/pyspedas_examples
\ No newline at end of file
diff --git a/pyspedas/barrel/__init__.py b/pyspedas/barrel/__init__.py
new file mode 100644
index 00000000..7c114e1d
--- /dev/null
+++ b/pyspedas/barrel/__init__.py
@@ -0,0 +1,57 @@
+from .load import load
+
+def sspc(trange=['2013-01-28', '2013-01-29'],
+ probe='1A',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
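+    """Load BARREL slow spectra (sspc) data; thin wrapper around barrel.load (see load() for parameter details)."""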
+
+ return load(datatype='sspc', trange=trange, probe=probe, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+def mspc(trange=['2013-01-29','2013-01-30'],
+ probe='1A',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+
+ return load(datatype='mspc', trange=trange, probe=probe, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+def fspc(trange=['2013-01-29','2013-01-30'],
+ probe='1A',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+
+ return load(datatype='fspc',trange=trange, probe=probe, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+def rcnt(trange=['2013-01-29','2013-01-30'],
+ probe='1A',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+
+ return load(datatype='rcnt', trange=trange, probe=probe, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+def magn(trange=['2013-01-29','2013-01-30'],
+ probe='1A',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+
+ return load(datatype='magn', trange=trange, probe=probe, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+def ephm(trange=['2013-01-29','2013-01-30'],
+ probe='1A',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+
+ return load(datatype='ephm', trange=trange, probe=probe, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+def hkpg(trange=['2013-01-29','2013-01-30'],
+ probe='1A',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+
+ return load(datatype='hkpg', trange=trange, probe=probe, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
diff --git a/pyspedas/barrel/config.py b/pyspedas/barrel/config.py
new file mode 100644
index 00000000..2d6de4f4
--- /dev/null
+++ b/pyspedas/barrel/config.py
@@ -0,0 +1,70 @@
+import os
+
+CONFIG = {
+ 'local_data_dir': 'barrel_data/',
+ 'remote_data_dir': 'https://spdf.gsfc.nasa.gov/pub/data/barrel/',
+ 'defaults': {
+ '1a': {'trange': ['2013-01-28', '2013-02-14']},
+ '1b': {'trange': ['2013-01-02', '2013-01-09']},
+ '1c': {'trange': ['2013-01-16', '2013-01-26']},
+ '1d': {'trange': ['2013-01-04', '2013-01-21']},
+ '1g': {'trange': ['2013-01-14', '2013-01-27']},
+ '1h': {'trange': ['2013-01-19', '2013-02-11']},
+ '1i': {'trange': ['2013-01-09', '2013-02-16']},
+ '1j': {'trange': ['2013-01-01', '2013-01-14']},
+ '1k': {'trange': ['2013-01-05', '2013-01-21']},
+ '1m': {'trange': ['2013-01-06', '2013-01-12']},
+ '1n': {'trange': ['2013-01-08', '2013-01-16']},
+ '1o': {'trange': ['2013-01-09', '2013-01-17']},
+ '1q': {'trange': ['2013-01-20', '2013-02-06']},
+ '1r': {'trange': ['2013-01-21', '2013-01-23']},
+ '1s': {'trange': ['2013-01-22', '2013-01-27']},
+ '1t': {'trange': ['2013-01-24', '2013-02-15']},
+ '1u': {'trange': ['2013-01-26', '2013-02-09']},
+ '1v': {'trange': ['2013-01-30', '2013-02-06']},
+ '2a': {'trange': ['2014-01-16', '2014-02-04']},
+ '2b': {'trange': ['2014-01-17', '2014-02-07']},
+ '2c': {'trange': ['2014-01-20', '2014-01-25']},
+ '2d': {'trange': ['2014-01-01', '2014-01-01']},
+ '2e': {'trange': ['2014-01-27', '2014-01-29']},
+ '2f': {'trange': ['2014-02-01', '2014-02-03']},
+ '2i': {'trange': ['2013-12-31', '2014-01-14']},
+ '2k': {'trange': ['2014-01-04', '2014-01-12']},
+ '2l': {'trange': ['2014-01-06', '2014-02-03']},
+ '2m': {'trange': ['2014-01-11', '2014-01-13']},
+ '2n': {'trange': ['2014-01-18', '2014-01-19']},
+ '2o': {'trange': ['2014-01-26', '2014-02-09']},
+ '2p': {'trange': ['2014-01-30', '2014-02-11']},
+ '2q': {'trange': ['2014-02-02', '2014-02-04']},
+ '2t': {'trange': ['2013-01-27', '2014-01-17']},
+ '2w': {'trange': ['2013-12-31', '2014-01-11']},
+ '2x': {'trange': ['2014-01-04', '2014-01-22']},
+ '2y': {'trange': ['2014-01-11', '2014-01-19']},
+ '3a': {'trange': ['2015-08-10', '2015-08-10']},
+ '3b': {'trange': ['2015-08-13', '2015-08-13']},
+ '3c': {'trange': ['2015-08-17', '2015-08-18']},
+ '3d': {'trange': ['2015-08-19', '2015-08-19']},
+ '3e': {'trange': ['2015-08-21', '2015-08-22']},
+ '3f': {'trange': ['2015-08-25', '2015-08-26']},
+ '3g': {'trange': ['2015-08-25', '2015-08-26']},
+ '4a': {'trange': ['2016-08-13', '2016-08-14']},
+ '4b': {'trange': ['2016-08-16', '2016-08-17']},
+ '4c': {'trange': ['2016-08-21', '2016-08-22']},
+ '4d': {'trange': ['2016-08-21', '2016-08-22']},
+ '4e': {'trange': ['2016-08-24', '2016-08-25']},
+ '4f': {'trange': ['2016-08-28', '2016-08-29']},
+ '4g': {'trange': ['2016-08-29', '2016-08-30']},
+ '4h': {'trange': ['2016-08-30', '2016-08-31']},
+ '5a': {'trange': ['2018-06-25', '2018-06-26']},
+ '6a': {'trange': ['2018-02-09', '2019-02-21']},
+ '7a': {'trange': ['2019-12-29', '2020-04-01']}
+ }
+}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join(
+ [os.environ['SPEDAS_DATA_DIR'], 'barrel'])
+
+if os.environ.get('BARREL_DATA_DIR'):
+    CONFIG['local_data_dir'] = os.environ['BARREL_DATA_DIR']
\ No newline at end of file
diff --git a/pyspedas/barrel/load.py b/pyspedas/barrel/load.py
new file mode 100644
index 00000000..e812f11b
--- /dev/null
+++ b/pyspedas/barrel/load.py
@@ -0,0 +1,68 @@
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+def load(trange=None,
+ probe='1A',
+ datatype='sspc',
+ level='l2',
+ version='v10',
+ get_support_data=False,
+ files='',
+ notplot=False,
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+ """
+    This function loads data from the BARREL mission. It is not meant
+    to be called directly; instead, see the wrappers:
+ pyspedas.barrel.sspc
+ pyspedas.barrel.mspc
+ pyspedas.barrel.fspc
+ pyspedas.barrel.magn
+ pyspedas.barrel.ephm
+ pyspedas.barrel.rcnt
+ pyspedas.barrel.hkpg
+
+ """
+
+ if not isinstance(probe, list):
+ probe = [probe]
+
+ if trange is None:
+ trange = CONFIG['defaults'][probe[0]]
+
+ out_files = []
+ for prb in probe:
+ remote_path = (
+ str(level) + '/' + str(prb) + '/' + str(datatype) +
+ '/bar_' + str(prb) + '_' + str(level) + '_' + str(datatype) + '_%Y%m%d_' + str(version) + '.cdf'
+ )
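+        # e.g. probe='1A', datatype='sspc' gives 'l2/1A/sspc/bar_1A_l2_sspc_%Y%m%d_v10.cdf';
+        # dailynames() expands %Y%m%d per day, and the names are lowercased below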
+
+ remote_names = [name.lower() for name in dailynames(file_format=remote_path, trange=trange)]
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'],
+ local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly:
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, get_support_data=get_support_data, notplot=notplot)
+
+ if tvars is None:
+ return
+
+ if time_clip:
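+        # suffix='' makes time_clip write the clipped data back to the original variable name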
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
diff --git a/pyspedas/barrel/tests/__init__.py b/pyspedas/barrel/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/barrel/tests/tests.py b/pyspedas/barrel/tests/tests.py
new file mode 100644
index 00000000..176856f5
--- /dev/null
+++ b/pyspedas/barrel/tests/tests.py
@@ -0,0 +1,16 @@
+
+import os
+import unittest
+from pyspedas.utilities.data_exists import data_exists
+
+import pyspedas
+
+
+class LoadTestCases(unittest.TestCase):
+ def test_downloadonly(self):
+ mag_files = pyspedas.barrel.sspc(probe='1A', downloadonly=True)
+ self.assertTrue(os.path.exists(mag_files[0]))
+
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/pyspedas/cdagui/cdagui.py b/pyspedas/cdagui/cdagui.py
index 940fb1eb..390a993d 100644
--- a/pyspedas/cdagui/cdagui.py
+++ b/pyspedas/cdagui/cdagui.py
@@ -3,317 +3,198 @@
A GUI that can download data files from CDAWeb
and load them into pytplot variables.
-Requires cdasws, PyQt5.
-
-To open the GUI window:
- from pyspedas.cdagui.cdagui import cdagui
- x = cdagui()
+Requires cdasws.
For cdasws documentation, see:
https://pypi.org/project/cdasws/
https://cdaweb.gsfc.nasa.gov/WebServices/REST/py/cdasws/index.html
-Notes:
- To start the gui from the command line:
- python pyspedas/cdagui/cdagui.py
- To start the gui inside the python environment:
- exec(open('cdagui.py').read())
+To open the GUI window:
+ from pyspedas.cdagui.cdagui import cdagui
+ x = cdagui()
+To start the GUI from the command line, use:
+ python pyspedas/cdagui/cdagui.py
"""
-import sys
-import datetime
-from PyQt5.QtCore import Qt, QDate, QCoreApplication
-from PyQt5.QtWidgets import (QApplication, QWidget, QMainWindow,
- QGridLayout, QPushButton, QListWidget,
- QGroupBox, QCheckBox, QMessageBox,
- QVBoxLayout, QLabel, QLineEdit,
- QFileDialog, QCalendarWidget, QDialog)
+import os
+import time
+import tkinter as tk
+from tkinter import messagebox, filedialog
from cdaweb import CDAWeb
from config import CONFIG
-appx = QApplication(sys.argv)
-
-
-def show_my_message(self, title, msg):
- """Show a message."""
- alert = QMessageBox(self)
- alert.setWindowTitle(title)
- alert.setText(msg)
- alert.setIcon(2)
- alert.show()
- return alert
-
-
-class cdagui(QMainWindow):
- """Main CDAWeb Window."""
-
- def __init__(self, parent=None):
- """Inilitalize."""
- super().__init__()
- self.main_widget = GUIWidget(self)
- self.setCentralWidget(self.main_widget)
- self.init_UI()
-
- def init_UI(self):
- """Start GUI."""
- self.setWindowTitle('CDAWeb Data Downloader')
- self.statusbar = self.statusBar()
- self.statusbar.showMessage('Status: Ready')
- self.showMaximized()
-
-
-class GUIWidget(QWidget):
- """Main GUI class."""
-
- def __init__(self, parent):
- """Initialize widget."""
- super(GUIWidget, self).__init__(parent)
- self.parent = parent
- self.cda = CDAWeb()
- self.title_css = "background-color:#AFEEEE;\
- font-family:Verdana;font-size:14px;"
- self.button_css = "background-color:#AFEEAF;font-family:Verdana;"
- self.clear_css = "background-color:#E9967A;font-family:Verdana;"
- self.initUI()
-
- def createMissionGroupBox(self):
- """1. Missions and instruments group of GUI."""
-
- def button1_find_datasets():
- title = "Find Datasets"
- self.dataset_box.clear()
- self.file_box.clear()
- mission_list = [item.text() for item in
- self.mission_box.selectedItems()]
- instrument_list = [item.text() for item in
- self.instrument_box.selectedItems()]
- if len(mission_list) < 1 or len(instrument_list) < 1:
- msg = "Please select at least one mission and one instrument."
- aaa = show_my_message(self.parent, title, msg)
- return 0
- datasets = self.cda.get_datasets(mission_list, instrument_list)
- datalen = len(datasets)
- if datalen < 1:
- msg = "No datasets were found with these parameters."
- aaa = show_my_message(self.parent, title, msg)
- return 0
- elif datalen > 50:
- msg = "Number of datasets found: " + str(datalen)
- msg += "\nOnly 50 will be shown."
- aaa = show_my_message(self.parent, title, msg)
- self.mission_selected.setText(str(mission_list))
- self.instrument_selected.setText(str(instrument_list))
- self.dataset_box.addItems(datasets[:50])
-
- # Missions group GUI elements
- self.missionGroupBox = QGroupBox("Missions and Instruments")
-
- label1 = QLabel("Mission Groups:")
- list1 = QListWidget(self)
- list1.setSelectionMode(QListWidget.MultiSelection)
- list1.setMinimumHeight(50)
- list1.setMinimumWidth(400)
- list1.addItems(self.cda.get_observatories())
- self.mission_box = list1
-
- label2 = QLabel("Instrument Types:")
- list2 = QListWidget(self)
- list2.setSelectionMode(QListWidget.MultiSelection)
- list2.setMinimumHeight(50)
- list2.setMinimumWidth(400)
- list2.addItems(self.cda.get_instruments())
- self.instrument_box = list2
-
- label3 = QLabel("Select one or more Mission Group(s) and one"
- + " or more Instrument Type(s) and press:")
- button1 = QPushButton("1. Find Datasets")
- button1.setStyleSheet(self.button_css)
- button1.clicked.connect(button1_find_datasets)
-
- # Create the layout and add GUI elements
- # row, column, rowSpan, columnSpan
- layout = QGridLayout()
- layout.addWidget(label1, 0, 0)
- layout.addWidget(label2, 0, 1)
- layout.addWidget(list1, 1, 0)
- layout.addWidget(list2, 1, 1)
- layout.addWidget(label3, 2, 0, 1, 1)
- layout.addWidget(button1, 2, 1, 1, 1)
-
- self.missionGroupBox.setLayout(layout)
-
- def createDatasetBox(self):
- """2. Dataset group of GUI."""
- # Datasets group GUI elements
- self.datasetGroupBox = QGroupBox("Datasets")
-
- label1 = QLabel("Selected Mission Groups:")
- ans1 = QLabel(" ")
- self.mission_selected = ans1
- ans1.setWordWrap(True)
-
- label2 = QLabel("Selected Instruments:")
- ans2 = QLabel(" ")
- self.instrument_selected = ans2
- ans2.setWordWrap(True)
-
- list1 = QListWidget(self)
- self.dataset_box = list1
- list1.setMinimumHeight(50)
- list1.setMinimumWidth(400)
- list1.setSelectionMode(QListWidget.MultiSelection)
-
- layout = QGridLayout()
- layout.addWidget(label1, 0, 0, 1, 1)
- layout.addWidget(ans1, 0, 1, 1, 15)
- layout.addWidget(label2, 1, 0, 1, 1)
- layout.addWidget(ans2, 1, 1, 1, 15)
- layout.addWidget(list1, 2, 0, 1, 16)
-
- self.datasetGroupBox.setLayout(layout)
-
- def createTimeGroupBox(self):
- """3. Date and time group of GUI."""
-
- def button2_get_file_list():
- title = "Get File List"
- self.file_box.clear()
- dataset_list = [item.text() for item in
- self.dataset_box.selectedItems()]
- t0 = self.time_start_box.text()
- t1 = self.time_end_box.text()
- if len(dataset_list) < 1 or len(t0) < 9 or len(t1) < 9:
- msg = "Please select at least one dataset and start-end times."
- aaa = show_my_message(self.parent, title, msg)
- return 0
- file_list = self.cda.get_filenames(dataset_list, t0, t1)
- filelen = len(file_list)
+class cdaWindow:
+
+ def __init__(self, master):
+ # CDAWeb connection
+ cda = CDAWeb()
+
+ # Defaults
+ default_start_time = "2023-01-01 00:00:00"
+ default_end_time = "2023-01-01 23:59:59"
+ download_box = tk.IntVar()
+ default_dir = CONFIG["local_data_dir"]
+ default_status = "Status: Ready!"
+
+ def status(txt):
+ # Change status bar, show wait cursor
+ if txt == "":
+ status_label.config(text=default_status)
+ master.config(cursor="")
+ else:
+ status_label.config(text="Status: " + str(txt))
+ master.config(cursor="wait")
+ time.sleep(0.2)
+
+ master.update_idletasks()
+ master.update()
+
+ def select_dir():
+ # Button: Select dir
+ initial_dir = str(dir_entry.get())
+ path = filedialog.askdirectory(parent=window, initialdir=initial_dir, title="Please select a directory", mustexist=True)
+ if os.path.exists(path):
+ dir_entry.delete(0, tk.END)
+ dir_entry.insert(0, path)
+
+ def clear_boxes():
+ # Button: Clear all list boxes
+ mission_list.selection_clear(0, tk.END)
+ instrument_list.selection_clear(0, tk.END)
+ dataset_list.delete(0, tk.END)
+ file_list.delete(0, tk.END)
+ label_groups.config(text="Selected Mission Groups:")
+ label_instruments.config(text="Selected Instrument Types:")
+
+ def exit_gui():
+ # Button: Exit
+ window.destroy()
+
+ def find_datasets():
+ # Button: 1. Find Datasets
+ msgtitle = "Find Datasets"
+
+ # Change status
+ status(msgtitle + "...")
+
+ datasets = []
+ dataset_list.delete(0, tk.END)
+ file_list.delete(0, tk.END)
+ sel_g = list(mission_list.curselection())
+ sel_g_val = [mission_list.get(index) for index in sel_g]
+ sel_i = list(instrument_list.curselection())
+ sel_i_val = [instrument_list.get(index) for index in sel_i]
+ if len(sel_g_val) < 1:
+ messagebox.showerror(msgtitle, "Please select one or more Mission Groups!")
+ elif len(sel_i_val) < 1:
+ messagebox.showerror(msgtitle, "Please select one or more Instrument Types!")
+ else:
+ label_groups.config(text="Selected Mission Groups: [" + ",".join(sel_g_val) + "]")
+ label_instruments.config(text="Selected Instrument Types: [" + ",".join(sel_i_val) + "]")
+
+ datasets = cda.get_datasets(sel_g_val, sel_i_val)
+ if len(datasets) < 1:
+ messagebox.showerror(msgtitle, "No datasets found for these parameters!")
+ else:
+ for i in datasets:
+ dataset_list.insert(tk.END, str(i))
+
+ status("") # Reset status
+
+ return datasets
+
+ def find_filelist():
+ # Button: 2. Get File List
+ msgtitle = "Get File List"
+
+ # Change status
+ status(msgtitle + "...")
+
+ file_list.delete(0, tk.END)
+ all_files = []
+ sel_d = list(dataset_list.curselection())
+ sel_d_val = [dataset_list.get(index) for index in sel_d]
+ if len(sel_d_val) < 1:
+ messagebox.showerror(msgtitle, "Please select one or more Datasets!")
+ status("") # Reset status
+ return []
+
+ t0 = str(start_time.get())
+ t1 = str(end_time.get())
+ if len(t0) < 1 or len(t1) < 1:
+ messagebox.showerror(msgtitle, "Please set start and end time!")
+ status("") # Reset status
+ return []
+
+ # Get all files for this dataset and start/end times.
+ all_files = cda.get_filenames(sel_d_val, t0, t1)
+ filelen = len(all_files)
if filelen < 1:
- msg = "No datasets were found with these parameters."
- aaa = show_my_message(self.parent, title, msg)
- return 0
+ msg = "No files were found with these parameters."
+ messagebox.showinfo(msgtitle, msg)
+ status("") # Reset status
+ return []
elif filelen > 50:
msg = "Number of files found: " + str(filelen)
msg += "\nOnly 50 will be shown."
- aaa = show_my_message(self.parent, title, msg)
- self.file_box.addItems(file_list[:50])
-
- def pick_time(start_or_end):
- """Date picker."""
- dlg = QDialog(self)
- gridc = QVBoxLayout()
- dlg.setLayout(gridc)
- if (start_or_end == "start"):
- title_str = "Start date"
+ messagebox.showinfo(msgtitle, msg)
+
+ # Add filenames to listbox (up to 50 files)
+ for i in all_files[:50]:
+ file_list.insert(tk.END, str(i))
+
+ status("") # Reset status
+
+ # Return all files, even if they are more than 50
+ return all_files
+
+ def get_data():
+ # Button: 3. Get Data
+ msgtitle = "Get Data"
+
+ # Change status
+ status(msgtitle + "...")
+
+ file_result = []
+ sel_f = list(file_list.curselection())
+ sel_f_val = [file_list.get(index) for index in sel_f]
+ if len(sel_f_val) < 1:
+ messagebox.showerror(msgtitle, "Please select one or more files to download!")
+ status("") # Reset status
+ return []
+
+ if download_box.get() == 1:
+ download_only = True
else:
- title_str = "End date"
- titlelabel = QLabel(title_str)
- gridc.addWidget(titlelabel)
-
- my_calendar = QCalendarWidget()
- my_calendar.setGridVisible(True)
- my_calendar.move(10, 20)
- gridc.addWidget(my_calendar)
-
- labeldate = QLabel("")
- gridc.addWidget(labeldate)
-
- def dial_exit():
- dlg.done(1)
-
- buttonc = QPushButton("Close")
- buttonc.clicked.connect(dial_exit)
- gridc.addWidget(buttonc)
-
- def show_date():
- date = my_calendar.selectedDate()
- date_string = date.toString('yyyy-MM-dd')
- if (start_or_end == "start"):
- self.time_start_box.setText(date_string + " 00:00:01")
- else:
- self.time_end_box.setText(date_string + " 23:59:59")
- labeldate.setText(date_string)
-
- my_calendar.clicked[QDate].connect(show_date)
-
- dlg.setWindowTitle("Calendar")
- dlg.exec_()
-
- # Date and Time group GUI elements
- self.timeGroupBox = QGroupBox("Date and Time")
-
- # By default show 7 days behind to ensure that there is data
- label1 = QLabel("Start Time:")
- t0 = datetime.datetime.strftime(datetime.datetime.now()
- - datetime.timedelta(7), '%Y-%m-%d')
- time1 = QLineEdit(str(t0) + " 00:00:01")
- self.time_start_box = time1
- button1 = QPushButton("Select")
- button1.clicked.connect(lambda: pick_time("start"))
-
- label2 = QLabel("End Time:")
- time2 = QLineEdit(str(t0) + " 23:59:59")
- self.time_end_box = time2
- button2 = QPushButton("Select")
- button2.clicked.connect(lambda: pick_time("end"))
-
- label3 = QLabel("Date and time format: YYYY-MM-DD[ HH:MM:SS]")
- button3 = QPushButton("2. Get File List")
- button3.setStyleSheet(self.button_css)
- button3.clicked.connect(button2_get_file_list)
-
- layout = QGridLayout()
- layout.addWidget(label1, 0, 0)
- layout.addWidget(time1, 0, 1)
- layout.addWidget(button1, 0, 2)
- layout.addWidget(label2, 0, 3)
- layout.addWidget(time2, 0, 4)
- layout.addWidget(button2, 0, 5)
- layout.addWidget(label3, 1, 0, 1, 3)
- layout.addWidget(button3, 1, 3, 1, 3)
-
- self.timeGroupBox.setLayout(layout)
-
- def createDownloadGroupBox(self):
- """4. Download group of GUI."""
-
- def button3_get_data():
- """Get data button."""
- title = "Download Files"
- file_list = [item.text() for item in self.file_box.selectedItems()]
- if len(file_list) < 1:
- msg = "Please select at least one file to download."
- aaa = show_my_message(self.parent, title, msg)
- return 0
- local_dir = self.dir_box.text()
+ download_only = False
+
+ local_dir = str(dir_entry.get())
if len(local_dir) < 1:
- msg = "Please select a local directory."
- aaa = show_my_message(self.parent, title, msg)
- return 0
- download_only = False
- if check1.isChecked():
- download_only = True
+ local_dir = default_dir
+ dir_entry.insert(0, default_dir)
+
- # The following can be slow, especially if there are multiple files
- self.parent.statusbar.showMessage('Status: Downloading, \
- please wait...')
- QApplication.setOverrideCursor(Qt.WaitCursor)
- QApplication.processEvents()
- result = self.cda.cda_download(file_list, local_dir, download_only)
- QApplication.restoreOverrideCursor()
- self.parent.statusbar.showMessage('Status: Ready')
+ # Get the files, and/or the tplot variables
+            # result is a list of [remote filename, local filename, status] entries
+ result = cda.cda_download(sel_f_val, local_dir, download_only)
+ # Show a message about the results
filelen = len(result)
if filelen < 1:
msg = "No files were downloaded."
- aaa = show_my_message(self.parent, title, msg)
- return 0
+ messagebox.showerror(msgtitle, msg)
+ status("") # Reset status
+ return []
+
else:
+ file_result = result
count_no_downloads = 0
count_tplot_problem = 0
count_tplot = 0
for item in result:
+ if len(item) != 3:
+ continue
if item[2] == -1:
count_no_downloads += 1
elif item[2] == 0 and not download_only:
@@ -330,114 +211,217 @@ def button3_get_data():
msg += ("\nFiles loaded to pytplot: " + str(count_tplot))
msg += ("\nFiles that could not be loaded to pytplot: "
+ str(count_tplot_problem))
- aaa = show_my_message(self.parent, title, msg)
+ # Show final message with results
+ messagebox.showinfo(msgtitle, msg)
+
+ status("") # Reset status
+ return file_result
+
+ # Create the main window
+ window = master
+ window.title("CDAWeb Data Downloader")
+ window.geometry("800x600")
+ window.configure()
+ window.option_add("*font", "lucida 10")
+        window.state("zoomed")  # Start maximized
+ window.minsize(800, 600)
+
+ # Size of grid
+ no_of_cols = 2
+ no_of_rows = 14
+
+ # Create columns
+ for i in range(no_of_cols):
+ window.grid_columnconfigure(i, weight=1)
+
+ # Create rows
+ for i in range(no_of_rows):
+ window.grid_rowconfigure(i, weight=1)
+
+ # Row 0 - just a label
+ window.grid_rowconfigure(0, weight=0)
+ label00 = tk.Label(window, text="Download Data from CDAWeb", bg="#AFEEEE", font=("Helvetica", 10, "bold"))
+ label00.grid(row=0, columnspan=2, sticky="new")
+
+ # Row 1, 2 - Mission Groups and Instrument Types
+ window.grid_rowconfigure(1, weight=0)
+ label10 = tk.Label(window, text="Mission Groups:")
+ label10.grid(row=1, column=0, sticky="ws")
+
+ label11 = tk.Label(window, text="Instrument Types:")
+ label11.grid(row=1, column=1, sticky="ws")
+
+ cell20 = tk.Frame(window)
+ cell20.grid(row=2, column=0, sticky="nsew")
+ mission_list = tk.Listbox(cell20, selectmode=tk.MULTIPLE, exportselection=False)
+ mission_list.pack(side=tk.TOP, fill=tk.BOTH, padx=5, pady=5, ipadx=5, ipady=5, expand=True)
+ cdagroups = cda.get_observatories()
+ # cdagroups = [i for i in range(200)]
+ for g in cdagroups:
+ mission_list.insert(tk.END, str(g))
+ scrollbar20 = tk.Scrollbar(mission_list, orient="vertical")
+ scrollbar20.pack(side=tk.RIGHT, fill=tk.BOTH)
+ mission_list.config(yscrollcommand=scrollbar20.set)
+ scrollbar20.config(command=mission_list.yview)
+
+ cell21 = tk.Frame(window)
+ cell21.grid(row=2, column=1, sticky="nsew")
+ instrument_list = tk.Listbox(cell21, selectmode=tk.MULTIPLE, exportselection=False)
+ instrument_list.pack(side=tk.TOP, fill=tk.BOTH, padx=5, pady=5, ipadx=5, ipady=5, expand=True)
+ cdainstr = cda.get_instruments()
+ # cdainstr = [i for i in range(200)]
+ for i in cdainstr:
+ instrument_list.insert(tk.END, str(i))
+ scrollbar21 = tk.Scrollbar(instrument_list, orient="vertical")
+ scrollbar21.pack(side=tk.RIGHT, fill=tk.BOTH)
+ instrument_list.config(yscrollcommand=scrollbar21.set)
+ scrollbar21.config(command=instrument_list.yview)
+
+ # Row 3 - Find Datasets button
+ window.grid_rowconfigure(3, weight=0)
+ msg = "Select Mission Group(s) and Instrument Type(s) and press:"
+ label30 = tk.Label(window, text=msg)
+ label30.grid(row=3, column=0, sticky="nse")
+
+ button31 = tk.Button(window, text="1. Find Datasets", bg="#AFEEAF", command=find_datasets, width=30)
+ button31.grid(row=3, column=1, sticky="nsw")
+
+ # Row 4 - Dataset labels
+ window.grid_rowconfigure(4, weight=0)
+ cell40 = tk.Frame(window)
+ cell40.grid(row=4, columnspan=2, sticky="new")
+ for i in range(3):
+ cell40.grid_rowconfigure(i, weight=1)
+ cell40.grid_columnconfigure(0, weight=1)
+ label_groups = tk.Label(cell40, text="Selected Mission Groups:", justify="left", anchor="w")
+ label_groups.grid(row=0, sticky="new")
+ label_instruments = tk.Label(cell40, text="Selected Instrument Types:", justify="left", anchor="w")
+ label_instruments.grid(row=1, sticky="new")
+ label_2 = tk.Label(cell40, text="Datasets:", justify="left", anchor="w")
+ label_2.grid(row=2, sticky="new")
+
+ # Row 5 - Datasets
+ cell50 = tk.Frame(window)
+ cell50.grid(row=5, columnspan=2, sticky="nsew")
+ dataset_list = tk.Listbox(cell50, selectmode=tk.MULTIPLE, exportselection=False)
+ dataset_list.pack(side=tk.TOP, fill=tk.BOTH, padx=5, pady=5, ipadx=5, ipady=5, expand=True)
+ scrollbar50 = tk.Scrollbar(dataset_list, orient="vertical")
+ scrollbar50.pack(side=tk.RIGHT, fill=tk.BOTH)
+ dataset_list.config(yscrollcommand=scrollbar50.set)
+ scrollbar50.config(command=dataset_list.yview)
+
+ # Row 6, 7 - Date Time
+ window.grid_rowconfigure(6, weight=0)
+ label60 = tk.Label(window, text="Date and Time (format: YYYY-MM-DD[ HH:MM:SS])", justify="left", anchor="w")
+ label60.grid(row=6, columnspan=2, sticky="new")
+
+ window.grid_rowconfigure(7, weight=0)
+ cell70 = tk.Frame(window)
+ cell70.grid(row=7, column=0, sticky="new")
+ cell70.grid_rowconfigure(0, weight=1)
+ cell70.grid_columnconfigure(0, weight=0)
+ cell70.grid_columnconfigure(1, weight=1)
+ cell70.grid_columnconfigure(2, weight=0)
+ label70 = tk.Label(cell70, text="Start Time:", justify="left", anchor="w")
+ label70.grid(row=0, column=0, sticky="wsn")
+ start_time = tk.Entry(cell70)
+ start_time.insert(0, default_start_time)
+ start_time.grid(row=0, column=1, sticky="ewsn", padx=6)
+
+ cell71 = tk.Frame(window)
+ cell71.grid(row=7, column=1, sticky="new")
+ cell71.grid_rowconfigure(0, weight=1)
+ cell71.grid_columnconfigure(0, weight=0)
+ cell71.grid_columnconfigure(1, weight=1)
+ cell71.grid_columnconfigure(2, weight=0)
+ label71 = tk.Label(cell71, text="End Time:", justify="left", anchor="w")
+ label71.grid(row=0, column=0, sticky="wsn")
+ end_time = tk.Entry(cell71)
+ end_time.insert(0, default_end_time)
+ end_time.grid(row=0, column=1, sticky="sewn", padx=6)
+
+ # Row 8 - Get File List button
+ window.grid_rowconfigure(8, weight=0)
+ msg = "Select Dataset(s) and Times and press:"
+ label80 = tk.Label(window, text=msg)
+ label80.grid(row=8, column=0, sticky="nse", pady=6)
+
+ button81 = tk.Button(window, text="2. Get File List", bg="#AFEEAF", command=find_filelist, width=30)
+ button81.grid(row=8, column=1, sticky="nsw", pady=6)
+
+ # Row 9, 10 - Files
+ window.grid_rowconfigure(9, weight=0)
+ label90 = tk.Label(window, text="Remote Files:", justify="left", anchor="w")
+ label90.grid(row=9, columnspan=2, sticky="new")
+
+ cell100 = tk.Frame(window)
+ cell100.grid(row=10, columnspan=2, sticky="nsew")
+ file_list = tk.Listbox(cell100, selectmode=tk.MULTIPLE, exportselection=False)
+ file_list.pack(side=tk.TOP, fill=tk.BOTH, padx=5, pady=5, ipadx=5, ipady=5, expand=True)
+ scrollbar100 = tk.Scrollbar(file_list, orient="vertical")
+ scrollbar100.pack(side=tk.RIGHT, fill=tk.BOTH)
+ file_list.config(yscrollcommand=scrollbar100.set)
+ scrollbar100.config(command=file_list.yview)
+
+ # Row 11 - Download Directory
+ window.grid_rowconfigure(11, weight=0)
+ cell110 = tk.Frame(window)
+ cell110.grid(row=11, columnspan=2, sticky="new")
+ cell110.grid_columnconfigure(0, weight=0)
+ cell110.grid_columnconfigure(1, weight=1)
+ cell110.grid_columnconfigure(2, weight=0)
+
+ label110 = tk.Label(cell110, text="Local Directory:", justify="left", anchor="w")
+ label110.grid(row=0, column=0, sticky="new", padx=4)
+ dir_entry = tk.Entry(cell110)
+ dir_entry.insert(0, default_dir)
+ dir_entry.grid(row=0, column=1, sticky="sewn", padx=4)
+        # dir_entry.bind("<Key>", lambda e: "break")  # Make it read-only
+ button110 = tk.Button(cell110, text="Select Directory", command=select_dir, width=15)
+ button110.grid(row=0, column=2, sticky="wns", padx=4)
+
+ # Row 12 - Download Only checkbox, Get Data button, Clear, Exit
+ window.grid_rowconfigure(12, weight=0)
+
+ cell120 = tk.Frame(window)
+ cell120.grid(row=12, column=0, sticky="new", pady=4)
+ cell120.grid_columnconfigure(0, weight=0)
+ cell120.grid_columnconfigure(1, weight=1)
+ cell120.grid_columnconfigure(2, weight=0)
+ only_ch = tk.Checkbutton(cell120, text="Download Only", variable=download_box, onvalue=1, offvalue=0)
+ only_ch.grid(row=0, column=0, sticky="new", padx=4)
+ msg = "Select Files(s) and press:"
+ label120 = tk.Label(cell120, text=msg)
+ label120.grid(row=0, column=2, sticky="nse", pady=6)
+
+ cell121 = tk.Frame(window)
+ cell121.grid(row=12, column=1, sticky="new", pady=4)
+ cell121.grid_columnconfigure(0, weight=0)
+ cell121.grid_columnconfigure(1, weight=1)
+ cell121.grid_columnconfigure(2, weight=0)
+ cell121.grid_columnconfigure(3, weight=0)
+ button122 = tk.Button(cell121, text="3. Get Data", bg="#AFEEAF", command=get_data, width=30)
+ button122.grid(row=0, column=0, sticky="wns", padx=4)
+ button123 = tk.Button(cell121, text="Clear", command=clear_boxes, width=10, bg="#E9967A")
+ button123.grid(row=0, column=2, sticky="wns", padx=4)
+ button124 = tk.Button(cell121, text="Exit", command=exit_gui, width=10, bg="#E9967A")
+ button124.grid(row=0, column=3, sticky="wns", padx=4)
+
+ # Row 13 - Status bar
+ window.grid_rowconfigure(13, weight=0)
+ status_label = tk.Label(window, text=default_status, justify="left", anchor="w", relief="groove")
+ status_label.grid(row=13, columnspan=2, sticky="new", padx=2)
+
+
+def cdagui():
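+    """Create the Tk root window, launch the CDAWeb GUI, and block until it closes."""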
+ root = tk.Tk()
+ startgui = cdaWindow(root)
+ root.mainloop()
+ return startgui
+
+
+if __name__ == "__main__":
+    cdagui()
- def select_dir():
- """Select directory."""
- file = str(QFileDialog.getExistingDirectory(self,
- "Select Directory"))
- if file:
- self.local_dir = file
- self.dir_box.setText(self.local_dir)
-
- def clear_all():
- """Clear all boxes."""
- self.mission_box.clearSelection()
- self.instrument_box.clearSelection()
- self.instrument_selected.setText('')
- self.mission_selected.setText('')
- self.dataset_box.clear()
- self.file_box.clear()
- self.parent.statusbar.showMessage('Status: Ready')
-
- def exit_all():
- self.parent.close()
-
- # Download Files GUI elements
- self.dirGroupBox = QGroupBox("Remote Files and Download")
-
- list1 = QListWidget(self)
- list1.setMinimumHeight(50)
- list1.setMinimumWidth(400)
- list1.setSelectionMode(QListWidget.MultiSelection)
- self.file_box = list1
-
- label1 = QLabel("Download Directory:")
- dir1 = QLineEdit()
- self.local_dir = CONFIG['local_data_dir']
- dir1.setText(self.local_dir)
- self.dir_box = dir1
- button1 = QPushButton("Change Directory")
- button1.clicked.connect(select_dir)
-
- msg2 = ("If checked, then the files will"
- + " only be downloaded. If unchecked, then they will also"
- + " be read and loaded into pytplot variables.")
- check1 = QCheckBox("Download Only")
- check1.setToolTip(msg2)
- check1.setChecked(True)
- buttondown = QPushButton("3. Get Data")
- buttondown.setStyleSheet(self.button_css)
- buttondown.clicked.connect(button3_get_data)
-
- buttonclear = QPushButton("Clear")
- buttonclear.setStyleSheet(self.clear_css)
- buttonclear.clicked.connect(clear_all)
-
- buttonexit = QPushButton("Exit")
- buttonexit.setStyleSheet(self.clear_css)
- buttonexit.clicked.connect(exit_all)
-
- layout = QGridLayout()
- layout.addWidget(list1, 0, 0, 1, 6)
- layout.addWidget(label1, 1, 0, 1, 1)
- layout.addWidget(dir1, 1, 1, 1, 4)
- layout.addWidget(button1, 1, 5, 1, 1)
- layout.addWidget(check1, 2, 0, 1, 2)
- layout.addWidget(buttondown, 2, 2, 1, 2)
- layout.addWidget(buttonclear, 2, 4, 1, 1)
- layout.addWidget(buttonexit, 2, 5, 1, 1)
-
- # Button1 action
-
- self.dirGroupBox.setLayout(layout)
-
- def initUI(self):
- """Create GUI."""
- # Main layout is vertical
- grid = QVBoxLayout()
- self.setLayout(grid)
-
- # Top label
- label1 = QLabel("Download Data from CDAWeb")
- label1.setStyleSheet(self.title_css)
- label1.setAlignment(Qt.AlignCenter)
- label1.setMaximumHeight(20)
- grid.addWidget(label1)
-
- # 1. Create missions and instruments group
- self.createMissionGroupBox()
- grid.addWidget(self.missionGroupBox)
-
- # 2. Create dataset group
- self.createDatasetBox()
- grid.addWidget(self.datasetGroupBox)
-
- # 3. Create datetime group
- self.createTimeGroupBox()
- grid.addWidget(self.timeGroupBox)
-
- # 4. Create download group
- self.createDownloadGroupBox()
- grid.addWidget(self.dirGroupBox)
-
- self.showMaximized()
-
-
-if __name__ == '__main__':
- app = QCoreApplication.instance()
- if app is None:
- app = QApplication(sys.argv)
-
- cdagui = cdagui()
- sys.exit(app.exec_())
diff --git a/pyspedas/cdagui/cdaweb.py b/pyspedas/cdagui/cdaweb.py
index 94576be3..cfd1ef3f 100644
--- a/pyspedas/cdagui/cdaweb.py
+++ b/pyspedas/cdagui/cdaweb.py
@@ -6,6 +6,7 @@
https://cdaweb.gsfc.nasa.gov/WebServices/REST/py/cdasws/index.html
"""
+import logging
import os
import re
from cdasws import CdasWs
@@ -137,20 +138,20 @@ def cda_download(self, remote_files, local_dir, download_only=False,
msg = "cdf_to_tplot could not load " + localfile
msg += "\n\n"
msg += "Error from pytplot: " + str(err)
- print(msg)
+ logging.error(msg)
tplot_loaded = 0
else:
- print(str(count) + '. There was a problem. Could not download \
+ logging.error(str(count) + '. There was a problem. Could not download \
file: ' + remotef)
tplot_loaded = -1
localfile = ''
result.append([remotef, localfile, tplot_loaded])
- print('Downloaded ' + str(dcount) + ' files.')
+ logging.info('Downloaded ' + str(dcount) + ' files.')
if not download_only:
loaded_vars = list(set(loaded_vars))
- print('tplot variables:')
+ logging.info('tplot variables:')
for var in loaded_vars:
- print(var)
+ logging.info(var)
return result
diff --git a/pyspedas/cluster/__init__.py b/pyspedas/cluster/__init__.py
index b2486aa1..0acd40cb 100644
--- a/pyspedas/cluster/__init__.py
+++ b/pyspedas/cluster/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def fgm(trange=['2018-11-5', '2018-11-6'],
probe='1',
@@ -60,6 +61,7 @@ def fgm(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='fgm', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def aspoc(trange=['2018-11-5', '2018-11-6'],
probe='1',
datatype='pp',
@@ -119,6 +121,7 @@ def aspoc(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='aspoc', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def cis(trange=['2018-11-5', '2018-11-6'],
probe='1',
datatype='pp',
@@ -178,6 +181,7 @@ def cis(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='cis', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def dwp(trange=['2018-11-5', '2018-11-6'],
probe='1',
datatype='pp',
@@ -237,6 +241,7 @@ def dwp(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='dwp', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def edi(trange=['2018-11-5', '2018-11-6'],
probe='1',
datatype='pp',
@@ -296,6 +301,7 @@ def edi(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='edi', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def efw(trange=['2018-11-5', '2018-11-6'],
probe='1',
datatype='pp',
@@ -355,6 +361,7 @@ def efw(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='efw', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def peace(trange=['2016-11-5', '2016-11-6'],
probe='1',
datatype='pp',
@@ -414,6 +421,7 @@ def peace(trange=['2016-11-5', '2016-11-6'],
"""
return load(instrument='peace', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def rapid(trange=['2016-11-5', '2016-11-6'],
probe='1',
datatype='pp',
@@ -473,6 +481,7 @@ def rapid(trange=['2016-11-5', '2016-11-6'],
"""
return load(instrument='rapid', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def staff(trange=['2012-11-5', '2012-11-6'],
probe='1',
datatype='pp',
@@ -532,6 +541,7 @@ def staff(trange=['2012-11-5', '2012-11-6'],
"""
return load(instrument='staff', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def wbd(trange=['2012-11-6', '2012-11-7'],
probe='1',
datatype='waveform',
@@ -591,6 +601,7 @@ def wbd(trange=['2012-11-6', '2012-11-7'],
"""
return load(instrument='wbd', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
def whi(trange=['2012-11-5', '2012-11-6'],
probe='1',
datatype='pp',
@@ -649,3 +660,7 @@ def whi(trange=['2012-11-5', '2012-11-6'],
"""
return load(instrument='whi', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, no_update=no_update, time_clip=time_clip)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Cluster', instrument=instrument, label=label)
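+    # Usage sketch (the instrument name below is illustrative, not from this patch):
+    # pyspedas.cluster.datasets(instrument='FGM')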
diff --git a/pyspedas/cluster/load.py b/pyspedas/cluster/load.py
index 4798dd62..229580b5 100644
--- a/pyspedas/cluster/load.py
+++ b/pyspedas/cluster/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/cluster/load_csa.py b/pyspedas/cluster/load_csa.py
index 774863ee..fabfb8ad 100644
--- a/pyspedas/cluster/load_csa.py
+++ b/pyspedas/cluster/load_csa.py
@@ -8,9 +8,10 @@
We download the tar.gz file directly, without using pyspedas.download().
"""
-from pyspedas.analysis.time_clip import time_clip as tclip
-from pyspedas.utilities.time_string import time_string
-from pyspedas.utilities.time_double import time_double
+import logging
+from pytplot import time_clip as tclip
+from pytplot import time_string
+from pytplot import time_double
from pytplot import cdf_to_tplot
import requests
@@ -111,18 +112,24 @@ def load_csa(trange=['2001-02-01', '2001-02-03'],
# Delivery interval
delivery_interval = 'ALL'
+ if not probes:
+ return tvars
+
+ if not datatypes:
+ return tvars
+
+ if not isinstance(probes, list):
+ probes = [probes]
+
+ if not isinstance(datatypes, list):
+ datatypes = [datatypes]
+
# TODO: Create a function that can resolve wildcards
# similar to IDL spedas ssl_check_valid_name
# my_datatypes=ssl_check_valid_name(uc_datatypes,master_datatypes)
# my_probes=ssl_check_valid_name(uc_probes,master_probes)
- if not probes: # list is empty
- return tvars
- elif probes[0] == '*': # load all probes
+ if probes[0] == '*': # load all probes
probes = cl_master_probes()
- if not datatypes: # list is empty
- return tvars
- elif datatypes[0] == '*': # load all probes
- datatypes = cl_master_datatypes()
# Construct the query string
base_url = 'https://csa.esac.esa.int/csa-sl-tap/data?'
@@ -141,20 +148,20 @@ def load_csa(trange=['2001-02-01', '2001-02-03'],
local_path = CONFIG['local_data_dir']
Path(local_path).mkdir(parents=True, exist_ok=True)
- out_gz = local_path + 'temp_cluster_file.tar.gz' # Temp file name
+ out_gz = os.path.join(local_path, 'temp_cluster_file.tar.gz') # Temp file name
# Download the file.
- print("Downloading Cluster data, please wait....")
+ logging.info("Downloading Cluster data, please wait....")
try:
r = requests.get(url, allow_redirects=True)
r.raise_for_status()
except requests.exceptions.HTTPError as err:
- print("Download HTTP error: ", err)
+ logging.error("Download HTTP error: " + str(err))
return tvars
except requests.exceptions.RequestException as e:
- print("Download error: ", e)
+ logging.error("Download error: " + str(e))
return tvars
- print("Download complete.")
+ logging.info("Download complete.")
# Open the downloaded file.
with open(out_gz, 'wb') as w:
@@ -171,30 +178,19 @@ def load_csa(trange=['2001-02-01', '2001-02-03'],
# Get unique set of files.
f_set = set(f)
# File list with full path.
- out_files = [local_path+s for s in list(f_set)]
+ out_files = [os.path.join(local_path, s) for s in list(f_set)]
out_files = sorted(out_files)
if downloadonly:
return out_files
# Load data into tplot
- try:
- tvars = cdf_to_tplot(out_files,
- suffix=suffix,
- get_support_data=get_support_data,
- varformat=varformat,
- varnames=varnames,
- notplot=notplot)
- except IndexError as e:
- print("cdf_to_tplot cannot load Cluster cdf file.")
- print("File: ", out_files[0])
- print("IndexError:", e)
- return tvars
- except TypeError as e:
- print("cdf_to_tplot cannot load Cluster cdf file.")
- print("File: ", out_files[0])
- print("TypeError:", e)
- return tvars
+ tvars = cdf_to_tplot(out_files,
+ suffix=suffix,
+ get_support_data=get_support_data,
+ varformat=varformat,
+ varnames=varnames,
+ notplot=notplot)
if notplot:
return tvars
diff --git a/pyspedas/cluster/tests/tests.py b/pyspedas/cluster/tests/tests.py
index 10cc5b63..a0e56069 100644
--- a/pyspedas/cluster/tests/tests.py
+++ b/pyspedas/cluster/tests/tests.py
@@ -1,13 +1,28 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+from pyspedas.cluster.load_csa import load_csa, cl_master_probes, cl_master_datatypes
+
class LoadTestCases(unittest.TestCase):
+ def test_csa(self):
+ dtypes = cl_master_datatypes()
+ probes = cl_master_probes()
+ self.assertTrue('CP_FGM_SPIN' in dtypes)
+ self.assertTrue('C1' in probes)
+
+ def test_load_fgm_data_csa(self):
+ mag_vars = load_csa(datatypes=['CP_FGM_SPIN'], probes=None) # returns empty list
+ mag_vars = load_csa(datatypes=None) # returns empty list
+ mag_vars = load_csa(datatypes='CP_FGM_SPIN', probes='*')
+ mag_vars = load_csa(datatypes=['CP_FGM_SPIN'], notplot=True)
+ self.assertTrue(data_exists('B_vec_xyz_gse__C1_CP_FGM_SPIN'))
+ self.assertTrue(data_exists('B_mag__C1_CP_FGM_SPIN'))
+ self.assertTrue(data_exists('sc_pos_xyz_gse__C1_CP_FGM_SPIN'))
+
def test_load_fgm_data(self):
- mag_vars = pyspedas.cluster.fgm()
+ mag_vars = pyspedas.cluster.fgm(time_clip=True)
self.assertTrue(data_exists('B_xyz_gse__C1_UP_FGM'))
def test_load_fgm_cp_data(self):
@@ -25,11 +40,10 @@ def test_load_cis_data(self):
def test_load_dwp_data(self):
dwp_vars = pyspedas.cluster.dwp()
self.assertTrue(data_exists('Correl_freq__C1_PP_DWP'))
-
- # crash loading the default data
- # def test_load_edi_data(self):
- # edi_vars = pyspedas.cluster.edi()
- # self.assertTrue(data_exists(''))
+
+ def test_load_edi_data(self):
+ edi_vars = pyspedas.cluster.edi(downloadonly=True)
+ self.assertTrue(isinstance(edi_vars, list))
def test_load_efw_data(self):
efw_vars = pyspedas.cluster.efw()
@@ -47,10 +61,9 @@ def test_load_sta_data(self):
sta_vars = pyspedas.cluster.staff()
self.assertTrue(data_exists('E_pow_f2__C1_PP_STA'))
- # large files
- # def test_load_wbd_data(self):
- # wbd_vars = pyspedas.cluster.wbd()
- # self.assertTrue(data_exists(''))
+ def test_load_wbd_data(self):
+ wbd_vars = pyspedas.cluster.wbd(trange=['2012-11-6/02:10', '2012-11-6/02:15'], notplot=True)
+ self.assertTrue('WBD_Elec' in wbd_vars)
def test_load_whi_data(self):
whi_vars = pyspedas.cluster.whi()
@@ -58,4 +71,4 @@ def test_load_whi_data(self):
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/cnofs/README.md b/pyspedas/cnofs/README.md
new file mode 100644
index 00000000..7db3035e
--- /dev/null
+++ b/pyspedas/cnofs/README.md
@@ -0,0 +1,43 @@
+
+## Communications/Navigation Outage Forecasting System (C/NOFS)
+The routines in this module can be used to load data from the Communications/Navigation Outage Forecasting System (C/NOFS) mission.
+
+### Instruments
+- Coupled Ion-Neutral Dynamics Investigation (CINDI)
+- Planar Langmuir Probe (PLP)
+- Vector Electric Field Instrument (VEFI)
+
+### Examples
+Get started by importing pyspedas and tplot; these are required to load and plot the data:
+
+```python
+import pyspedas
+from pytplot import tplot
+```
+
+#### Coupled Ion-Neutral Dynamics Investigation (CINDI)
+
+```python
+cindi_vars = pyspedas.cnofs.cindi(trange=['2013-11-5', '2013-11-6'])
+
+tplot(['ionVelocityX', 'ionVelocityY', 'ionVelocityZ'])
+```
+
+#### Planar Langmuir Probe (PLP)
+
+```python
+plp_vars = pyspedas.cnofs.plp(trange=['2010-11-5', '2010-11-6'])
+
+tplot('Ni')
+```
+
+#### Vector Electric Field Instrument (VEFI)
+
+```python
+vefi_vars = pyspedas.cnofs.vefi(trange=['2013-11-5', '2013-11-6'])
+
+tplot(['E_meridional', 'E_zonal'])
+```
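+
+To see which datasets are available for this mission, you can use the `datasets` helper defined in this module (the instrument name below is illustrative):
+
+```python
+pyspedas.cnofs.datasets(instrument='vefi')
+```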
+
+
+
diff --git a/pyspedas/cnofs/__init__.py b/pyspedas/cnofs/__init__.py
new file mode 100644
index 00000000..a014bd45
--- /dev/null
+++ b/pyspedas/cnofs/__init__.py
@@ -0,0 +1,181 @@
+from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
+
+def cindi(trange=['2013-11-5', '2013-11-6'],
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Coupled Ion-Neutral Dynamics Investigation (CINDI)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+ return load(instrument='cindi', trange=trange, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def plp(trange=['2013-11-5', '2013-11-6'],
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Planar Langmuir Probe (PLP)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+ return load(instrument='plp', trange=trange, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def vefi(trange=['2010-11-5', '2010-11-6'],
+ datatype='efield_1sec',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Vector Electric Field Instrument (VEFI)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ String specifying datatype (options: 'efield_1sec', 'bfield_1sec', 'ld_500msec')
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+ return load(instrument='vefi', datatype=datatype, trange=trange, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='CNOFS', instrument=instrument, label=label)
diff --git a/pyspedas/cnofs/config.py b/pyspedas/cnofs/config.py
new file mode 100644
index 00000000..03e25a49
--- /dev/null
+++ b/pyspedas/cnofs/config.py
@@ -0,0 +1,11 @@
+import os
+
+CONFIG = {'local_data_dir': 'cnofs_data/',
+ 'remote_data_dir': 'https://spdf.gsfc.nasa.gov/pub/data/cnofs/'}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'cnofs'])
+
+if os.environ.get('CNOFS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.environ['CNOFS_DATA_DIR']
\ No newline at end of file
diff --git a/pyspedas/cnofs/load.py b/pyspedas/cnofs/load.py
new file mode 100644
index 00000000..45c89339
--- /dev/null
+++ b/pyspedas/cnofs/load.py
@@ -0,0 +1,59 @@
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+
+def load(trange=['2013-11-5', '2013-11-6'],
+ instrument='cindi',
+ datatype='efield_1sec',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+    This function loads data from the C/NOFS mission; it is not meant
+ to be called directly; instead, see the wrappers:
+ pyspedas.cnofs.cindi
+ pyspedas.cnofs.plp
+ pyspedas.cnofs.vefi
+ """
+
+ if instrument == 'cindi':
+ pathformat = instrument+'/ivm_500ms_cdf/%Y/cnofs_'+instrument+'_ivm_500ms_%Y%m%d_v??.cdf'
+ elif instrument == 'plp':
+ pathformat = instrument+'/plasma_1sec/%Y/cnofs_'+instrument+'_plasma_1sec_%Y%m%d_v??.cdf'
+ elif instrument == 'vefi':
+ pathformat = instrument+'/'+datatype+'/%Y/cnofs_'+instrument+'_'+datatype+'_%Y%m%d_v??.cdf'
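+    # e.g., for instrument='cindi' and trange=['2013-11-5', '2013-11-6'], dailynames()
+    # resolves the pattern above to remote paths like:
+    # cindi/ivm_500ms_cdf/2013/cnofs_cindi_ivm_500ms_20131105_v??.cdf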
+
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange)
+
+ out_files = []
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly:
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
+
+ if notplot:
+ return tvars
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
diff --git a/pyspedas/cnofs/tests/__init__.py b/pyspedas/cnofs/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/cnofs/tests/tests.py b/pyspedas/cnofs/tests/tests.py
new file mode 100644
index 00000000..ab9eaf5c
--- /dev/null
+++ b/pyspedas/cnofs/tests/tests.py
@@ -0,0 +1,33 @@
+import os
+import unittest
+from pytplot import data_exists
+import pyspedas
+
+
+class LoadTestCases(unittest.TestCase):
+ def test_load_cindi_data(self):
+ c_vars = pyspedas.cnofs.cindi(time_clip=True)
+ self.assertTrue(data_exists('ionVelocityX'))
+ self.assertTrue(data_exists('ionVelocityY'))
+ self.assertTrue(data_exists('ionVelocityZ'))
+
+ def test_load_plp_data(self):
+ l_vars = pyspedas.cnofs.plp()
+ self.assertTrue(data_exists('Ni'))
+
+ def test_load_vefi_data(self):
+ l_vars = pyspedas.cnofs.vefi()
+ self.assertTrue(data_exists('E_meridional'))
+ self.assertTrue(data_exists('E_zonal'))
+
+ def test_load_notplot(self):
+ c_vars = pyspedas.cnofs.cindi(notplot=True)
+ self.assertTrue('ionVelocityX' in c_vars)
+
+ def test_downloadonly(self):
+ files = pyspedas.cnofs.cindi(downloadonly=True, trange=['2013-2-15', '2013-2-16'])
+ self.assertTrue(os.path.exists(files[0]))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/cotrans/cotrans.py b/pyspedas/cotrans/cotrans.py
index 76830033..3433d966 100644
--- a/pyspedas/cotrans/cotrans.py
+++ b/pyspedas/cotrans/cotrans.py
@@ -10,10 +10,11 @@
-----
This function is similar to cotrans.pro of IDL SPEDAS.
"""
+import logging
import pytplot
+from pytplot import get_coords, set_coords
from pyspedas.cotrans.cotrans_lib import subcotrans
-from pyspedas.cotrans.cotrans_get_coord import cotrans_get_coord
-from pyspedas.cotrans.cotrans_set_coord import cotrans_set_coord
+
def cotrans(name_in=None, name_out=None, time_in=None, data_in=None,
coord_in=None, coord_out=None):
@@ -43,21 +44,39 @@ def cotrans(name_in=None, name_out=None, time_in=None, data_in=None,
Fills a new pytplot variable with data in the coord_out system.
"""
if coord_out is None:
- print("cotrans error: No output coordinates were provided.")
+ logging.error("cotrans error: No output coordinates were provided.")
return 0
+    # Input data may be specified as a bare array rather than a tplot variable
+    if name_in is not None:
+        var_coord_in = get_coords(name_in)
+    else:
+        var_coord_in = None
+
+ # If the input coordinate system is supplied as an argument, and the tplot variable has a coordinate system
+ # specified in its metadata, check that they match, and if not, log the error and return failure.
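+    # For example (hypothetical variable name): if 'tha_pos' carries 'gse' in its
+    # metadata, cotrans('tha_pos', coord_in='gsm', coord_out='sm') logs an error
+    # and returns 0 rather than silently transforming from the wrong system.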
+
+    if var_coord_in is not None and coord_in is not None:
+ if var_coord_in.lower() != coord_in.lower():
+ logging.error("cotrans error: " + name_in + " has " +
+ var_coord_in.lower() + " coordinates, but transform from " + coord_in.lower() + " was requested.")
+ return 0
+
if coord_in is None:
- coord_in = cotrans_get_coord(name_in)
+ coord_in = var_coord_in
if coord_in is None:
- print("cotrans error: No input coordinates were provided.")
+ logging.error("cotrans error: No input coordinates were provided.")
return 0
coord_in = coord_in.lower()
coord_out = coord_out.lower()
all_coords = ["gse", "gsm", "sm", "gei", "geo", "mag", "j2000"]
- if coord_in not in all_coords or coord_out not in all_coords:
- print("cotrans error: Requested coordinate system not supported.")
+ if coord_in not in all_coords:
+ logging.error("cotrans error: Requested input coordinate system %s not supported.",coord_in)
+ return 0
+ if coord_out not in all_coords:
+ logging.error("cotrans error: Requested output coordinate system %s not supported.",coord_out)
return 0
if name_in is not None:
@@ -66,45 +85,35 @@ def cotrans(name_in=None, name_out=None, time_in=None, data_in=None,
time_in = tplot_data[0]
data_in = tplot_data[1]
else:
- name_in = "cotranstemp"
- pytplot.store_data(name_in, data={'x': list(time_in),
- 'y': list(data_in)})
+ pytplot.store_data('cotranstemp', data={'x': list(time_in),
+ 'y': list(data_in)})
if len(data_in[:]) < 1:
- print("cotrans error: Data is empty.")
+ logging.error("cotrans error: Data is empty.")
return 0
# Perform coordinate transformation.
data_out = subcotrans(list(time_in), list(data_in), coord_in, coord_out)
+ if name_in is None and name_out is None:
+ return data_out
+
+ if name_in is None:
+ name_in = 'cotranstemp'
+
# Find the name of the output pytplot variable.
if name_out is None:
- # If no output tplot name is provided, create one.
- if name_in is None:
- name_out = "data_out_" + coord_out
- else:
- name_out = name_in + "_" + coord_out
+ name_out = name_in + "_" + coord_out
# Save output pytplot variable.
pytplot.tplot_copy(name_in, name_out)
pytplot.data_quants[name_out].data = data_out
# We should change an attribute for the coordinate system.
- cotrans_set_coord(name_out, coord_out.upper())
-
- # should also update the legend, if it includes the coordinate system
- # for this to work, the coordinate system should be in all upper case
- metadata = pytplot.get_data(name_out, metadata=True)
- if metadata.get('plot_options') is not None:
- if metadata['plot_options'].get('yaxis_opt') is not None:
- if metadata['plot_options']['yaxis_opt'].get('legend_names') is not None:
- legend = metadata['plot_options']['yaxis_opt'].get('legend_names')
- updated_legend = [item.replace(coord_in.upper(), coord_out.upper()) for item in legend]
- metadata['plot_options']['yaxis_opt']['legend_names'] = updated_legend
- if metadata['plot_options']['yaxis_opt'].get('axis_label') is not None:
- ytitle = metadata['plot_options']['yaxis_opt'].get('axis_label')
- metadata['plot_options']['yaxis_opt']['axis_label'] = ytitle.replace(coord_in.upper(), coord_out.upper())
-
- print("Output variable: " + name_out)
+ set_coords(name_out, coord_out.upper())
+
+ # Code to update the legend and axis labels has been moved into cotrans_set_coord().
+
+ logging.info("Output variable: " + name_out)
return 1
diff --git a/pyspedas/cotrans/cotrans_get_coord.py b/pyspedas/cotrans/cotrans_get_coord.py
index 26e83638..a9697942 100644
--- a/pyspedas/cotrans/cotrans_get_coord.py
+++ b/pyspedas/cotrans/cotrans_get_coord.py
@@ -1,33 +1,6 @@
-
-from pytplot import get_data
+import logging
+from pytplot import get_coords
def cotrans_get_coord(name):
- '''
- This function returns the coordinate system of a tplot variable
-
- Parameters:
- name: str
- name of the tplot variable
-
- Notes:
- The coordinate system is stored in the variable's metadata at:
- metadata['data_att']['coord_sys']
-
- See cotrans_set_coord to update the coordinate system
-
- Returns:
- Coordinate system of the tplot variable
- or
- None if the coordinate system isn't set
- '''
-
- metadata = get_data(name, metadata=True)
- if metadata is None:
- return None
-
- if metadata.get('data_att'):
- if metadata['data_att'].get('coord_sys'):
- return metadata['data_att']['coord_sys']
-
- print('Coordinate system not found: ' + name)
- return None
\ No newline at end of file
+ logging.info("cotrans_get_coord is now a wrapper for pytplot.get_coords(). This version will eventually be removed.")
+ return get_coords(name)
diff --git a/pyspedas/cotrans/cotrans_lib.py b/pyspedas/cotrans/cotrans_lib.py
index a93f069f..a657fbce 100644
--- a/pyspedas/cotrans/cotrans_lib.py
+++ b/pyspedas/cotrans/cotrans_lib.py
@@ -12,6 +12,7 @@
For a comparison to IDL, see: http://spedas.org/wiki/index.php?title=Cotrans
"""
import numpy as np
+import logging
from datetime import datetime
from pyspedas.cotrans.igrf import set_igrf_params
from pyspedas.cotrans.j2000 import set_j2000_params
@@ -133,7 +134,7 @@ def cdipdir(time_in=None, iyear=None, idoy=None):
Same as SPEDAS cdipdir.
"""
if (time_in is None) and (iyear is None) and (idoy is None):
- print("Error: No time was provided.")
+ logging.error("Error: No time was provided.")
return
if (iyear is None) or (idoy is None):
@@ -329,7 +330,7 @@ def subgei2gse(time_in, data_in):
"""
xgse, ygse, zgse = tgeigse_vect(time_in, data_in)
- print("Running transformation: subgei2gse")
+ logging.info("Running transformation: subgei2gse")
return np.column_stack([xgse, ygse, zgse])
@@ -396,7 +397,7 @@ def subgse2gei(time_in, data_in):
"""
xgei, ygei, zgei = tgsegei_vect(time_in, data_in)
- print("Running transformation: subgse2gei")
+ logging.info("Running transformation: subgse2gei")
return np.column_stack([xgei, ygei, zgei])
@@ -476,7 +477,7 @@ def subgse2gsm(time_in, data_in):
"""
xgsm, ygsm, zgsm = tgsegsm_vect(time_in, data_in)
- print("Running transformation: subgse2gsm")
+ logging.info("Running transformation: subgse2gsm")
return np.column_stack([xgsm, ygsm, zgsm])
@@ -557,7 +558,7 @@ def subgsm2gse(time_in, data_in):
"""
xgse, ygse, zgse = tgsmgse_vect(time_in, data_in)
- print("Running transformation: subgsm2gse")
+ logging.info("Running transformation: subgsm2gse")
return np.column_stack([xgse, ygse, zgse])
@@ -629,7 +630,7 @@ def subgsm2sm(time_in, data_in):
"""
xsm, ysm, zsm = tgsmsm_vect(time_in, data_in)
- print("Running transformation: subgsm2sm")
+ logging.info("Running transformation: subgsm2sm")
return np.column_stack([xsm, ysm, zsm])
@@ -701,7 +702,7 @@ def subsm2gsm(time_in, data_in):
"""
xgsm, ygsm, zgsm = tsmgsm_vect(time_in, data_in)
- print("Running transformation: subsm2gsm")
+ logging.info("Running transformation: subsm2gsm")
return np.column_stack([xgsm, ygsm, zgsm])
@@ -733,7 +734,7 @@ def subgei2geo(time_in, data_in):
ygeo = -sgst * xgei + cgst * ygei
zgeo = zgei
- print("Running transformation: subgei2geo")
+ logging.info("Running transformation: subgei2geo")
return np.column_stack([xgeo, ygeo, zgeo])
@@ -765,7 +766,7 @@ def subgeo2gei(time_in, data_in):
ygei = sgst * xgeo + cgst * ygeo
zgei = zgeo
- print("Running transformation: subgeo2gei")
+ logging.info("Running transformation: subgeo2gei")
return np.column_stack([xgei, ygei, zgei])
@@ -826,7 +827,7 @@ def subgeo2mag(time_in, data_in):
mlat[1, 1] = 1.0
mag[i] = mlat @ out
- print("Running transformation: subgeo2mag")
+ logging.info("Running transformation: subgeo2mag")
return mag
@@ -886,7 +887,7 @@ def submag2geo(time_in, data_in):
glong[2, 2] = 1.0
geo[i] = glong @ out
- print("Running transformation: submag2geo")
+ logging.info("Running transformation: submag2geo")
return geo
@@ -1097,7 +1098,7 @@ def subgei2j2000(time_in, data_in):
cmatrix = j2000_matrix_vec(time_in)
d_out = ctv_mx_vec_rot(cmatrix, d)
- print("Running transformation: subgei2j2000")
+ logging.info("Running transformation: subgei2j2000")
return np.transpose(d_out)
@@ -1125,7 +1126,7 @@ def subj20002gei(time_in, data_in):
icmatrix = np.transpose(cmatrix, (1, 0, 2))
d_out = ctv_mx_vec_rot(icmatrix, d)
- print("Running transformation: subj20002gei")
+ logging.info("Running transformation: subj20002gei")
return np.transpose(d_out)
@@ -1257,19 +1258,23 @@ def subcotrans(time_in, data_in, coord_in, coord_out):
coord_in = coord_in.lower()
coord_out = coord_out.lower()
- if (coord_in not in coord_all) or (coord_out not in coord_all):
- print("Error: coordinate system cannot be found.")
+ if coord_in not in coord_all:
+ logging.error("Unknown coord_in value %s",coord_in)
+ return None
+
+ if coord_out not in coord_all:
+ logging.error("Unknown coord_out value %s",coord_out)
return None
if coord_in == coord_out:
- print("Warning: coord_in equal to coord_out.")
+ logging.warning("Warning: coord_in equal to coord_out.")
return data_out
# Construct a list of transformations.
p = find_path_t1_t2(coord_in, coord_out)
p = shorten_path_t1_t2(p)
p = shorten_path_t1_t2(p)
- print(p)
+ logging.info(p)
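+    # e.g., a geo -> sm request resolves to the path ['geo', 'gei', 'gse', 'gsm', 'sm']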
# Daisy chain the list of transformations.
for i in range(len(p)-1):
diff --git a/pyspedas/cotrans/cotrans_set_coord.py b/pyspedas/cotrans/cotrans_set_coord.py
index 50d8cdc9..e97c5a3d 100644
--- a/pyspedas/cotrans/cotrans_set_coord.py
+++ b/pyspedas/cotrans/cotrans_set_coord.py
@@ -1,36 +1,7 @@
+import logging
+from pytplot import set_coords
-from pytplot import get_data
def cotrans_set_coord(name, coord):
- '''
- This function sets the coordinate system of a tplot variable
-
- Parameters:
- name: str
- name of the tplot variable
-
- Notes:
- The coordinate system is stored in the variable's metadata at:
- metadata['data_att']['coord_sys']
-
- See cotrans_get_coord to return the coordinate system
-
- Returns:
- bool: True/False depending on if the operation was successful
- '''
-
- # check that the variable exists
- data = get_data(name)
- if data is None:
- return False
-
- metadata = get_data(name, metadata=True)
-
- if metadata.get('data_att') is None:
- metadata['data_att'] = {}
-
- # note: updating the metadata dict directly updates
- # the variable's metadata in memory, so there's
- # no need to update the variable with store_data
- metadata['data_att'] = {'coord_sys': coord}
- return True
\ No newline at end of file
+ logging.info("cotrans_set_coord is now a wrapper for pytplot.set_coords(). This version will eventually be removed.")
+ return set_coords(name,coord)
diff --git a/pyspedas/cotrans/fac_matrix_make.py b/pyspedas/cotrans/fac_matrix_make.py
index 7d03160d..f63e592a 100644
--- a/pyspedas/cotrans/fac_matrix_make.py
+++ b/pyspedas/cotrans/fac_matrix_make.py
@@ -2,8 +2,8 @@
import logging
import numpy as np
-from pyspedas.analysis.tnormalize import tnormalize
-from pyspedas.analysis.tcrossp import tcrossp
+from pytplot import tnormalize
+from pytplot import tcrossp
from pytplot import get_data, store_data
diff --git a/pyspedas/cotrans/matrix_array_lib.py b/pyspedas/cotrans/matrix_array_lib.py
new file mode 100644
index 00000000..f7a9b8b8
--- /dev/null
+++ b/pyspedas/cotrans/matrix_array_lib.py
@@ -0,0 +1,166 @@
+import numpy as np
+
+# use nansum from bottleneck if it's installed, otherwise use the numpy one
+try:
+ import bottleneck as bn
+ nansum = bn.nansum
+except ImportError:
+ nansum = np.nansum
+
+
+def ctv_err_test(vals, val, err=1e-5):
+ """ used to determine if values are equal within some standard of computational error """
+ return (vals >= val-err) & (vals <= val+err)
+
+
+def ctv_determ_mats(m):
+ """ returns the determinant of a list of 3x3 matrices """
+ return np.linalg.det(m)
+
+
+def ctv_identity_mats(m):
+ """
+ determines if a list of 3x3 matrices are identity matrices
+ will return the indexes of the identity matrices in the list of matrices
+ """
+ return ctv_err_test(m[:, 0, 0], 1) & ctv_err_test(m[:, 0, 1], 0) & ctv_err_test(m[:, 0, 2], 0) \
+ & ctv_err_test(m[:, 1, 0], 0) & ctv_err_test(m[:, 1, 1], 1) & ctv_err_test(m[:, 1, 2], 0) \
+ & ctv_err_test(m[:, 2, 0], 0) & ctv_err_test(m[:, 2, 1], 0) & ctv_err_test(m[:, 2, 2], 1)
+
+
+def ctv_mm_mult(m1, m2):
+ """ multiplication of two lists of 3x3 matrices """
+ out = np.zeros(m1.shape)
+ out[:, 0, 0] = nansum(m1[:, 0, :] * m2[:, :, 0], axis=1)
+ out[:, 1, 0] = nansum(m1[:, 1, :] * m2[:, :, 0], axis=1)
+ out[:, 2, 0] = nansum(m1[:, 2, :] * m2[:, :, 0], axis=1)
+ out[:, 0, 1] = nansum(m1[:, 0, :] * m2[:, :, 1], axis=1)
+ out[:, 1, 1] = nansum(m1[:, 1, :] * m2[:, :, 1], axis=1)
+ out[:, 2, 1] = nansum(m1[:, 2, :] * m2[:, :, 1], axis=1)
+ out[:, 0, 2] = nansum(m1[:, 0, :] * m2[:, :, 2], axis=1)
+ out[:, 1, 2] = nansum(m1[:, 1, :] * m2[:, :, 2], axis=1)
+ out[:, 2, 2] = nansum(m1[:, 2, :] * m2[:, :, 2], axis=1)
+ return out
+
+
+def ctv_verify_mats(m):
+ """
+ verifies whether a list of matrices
+ contains valid rotation matrices.
+    This is determined using 2 constraints:
+    #1 det(matrix) == +/-1
+    #2 matrix @ transpose(matrix) == I
+
+ returns 0 if the matrices use a mixed system
+ returns 1 if there are no valid mats
+ returns 2 if the data are all nans
+ returns 3 if there are some invalid mats
+ returns 4 if there are some nans
+    returns 5 if all mats are valid rotation matrices with no missing data (win!)
+ """
+ identity_mats = ctv_identity_mats(ctv_mm_mult(m, np.transpose(m, (0,2,1))))
+    # make sure matrix is self-inverting and the determinant is either 1 in all cases or -1 in all cases
+ idx = np.argwhere(ctv_err_test(ctv_determ_mats(m),1) & identity_mats)
+ c_right = idx.shape[0]
+ idx = np.argwhere(ctv_err_test(ctv_determ_mats(m),-1) & identity_mats)
+ c_left = idx.shape[0]
+ idx = np.argwhere(~np.isfinite(ctv_determ_mats(m)))
+ c_nan = idx.shape[0]
+ if (c_left != 0) and (c_right != 0): # mixed system
+ return 0
+ elif (c_left == 0) and (c_right == 0): # all matrices fail
+ return 1
+ elif c_nan == m.shape[0]: # all nans
+ return 2
+ elif (c_left+c_right+c_nan < 0): # some matrices fail
+ return 3
+ elif c_nan != 0: # some nans
+ return 4
+ else: # all mats are rotation mats and there is no missing data
+ return 5
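+    # Example (sketch): a stack of identity matrices is a valid right-handed set:
+    # ctv_verify_mats(np.repeat(np.eye(3)[None, :, :], 5, axis=0)) -> 5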
+
+
+def ctv_left_mats(m):
+ """
+ Is this a set of left-handed permutation matrices?
+ """
+ idx = np.argwhere(ctv_err_test(ctv_determ_mats(m),-1))
+ c = idx.shape[0]
+ if c > 0:
+ return 1
+ else:
+ return 0
+
+
+def ctv_swap_hands(m):
+ """
+    Turns a 3x3 matrix with a left-handed basis into a right-handed basis and vice versa
+ """
+ out = m.copy()
+ out[:,0,:] *= -1
+ return out
+
+
+def ctv_norm_vec_rot(v):
+ """
+ Helper function
+ Calculates the norm of a bunch of vectors simultaneously
+ """
+ if v is None:
+ return -1
+ if v.ndim != 2:
+ return -1
+ return np.sqrt(np.sum(v**2, axis=1))
+
+
+def ctv_normalize_vec_rot(v):
+ """
+ Helper function
+ Normalizes a bunch of vectors simultaneously
+ """
+ if v is None:
+ return -1
+ if v.ndim != 2:
+ return -1
+ n_a = ctv_norm_vec_rot(v)
+ if (n_a.ndim == 0) and (n_a == -1):
+ return -1
+ v_s = v.shape
+    # calculation is pretty straightforward
+ # we turn n_a into an N x D so computation can be done element by element
+ n_b = np.repeat(n_a, v_s[1]).reshape(v_s)
+ return v/n_b
+
+
+def ctv_mx_vec_rot(m, x):
+ """
+ Helper function
+ Vectorized fx to multiply n matrices by n vectors
+ """
+ # input checks
+ if m is None:
+ return -1
+ if x is None:
+ return -1
+ m_s = m.shape
+ x_s = x.shape
+ # make sure number of dimensions in input arrays is correct
+ if len(m_s) != 3:
+ return -1
+ if len(x_s) != 2:
+ return -1
+ # make sure dimensions match
+ if not np.array_equal(x_s, [m_s[0], m_s[1]]):
+ return -1
+ if not np.array_equal(m_s, [x_s[0], x_s[1], x_s[1]]):
+ return -1
+    # calculation is pretty straightforward
+    # we turn x into an N x 3 x 3 so computation can be done element by element
+    # (each component must be repeated x_s[1] times to fill the N x 3 x 3 array)
+    y_t = np.repeat(x, x_s[1]).reshape(x_s[0], x_s[1], x_s[1])
+ # custom multiplication requires rebin to stack vector across rows,
+ # not columns
+ y_t = np.transpose(y_t, (0, 2, 1))
+ # 9 multiplications and 3 additions per matrix
+ y = np.sum(y_t*m, axis=2)
+ return y
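+    # Example (sketch): with a stack of identity matrices the vectors come back
+    # unchanged:
+    # ctv_mx_vec_rot(np.repeat(np.eye(3)[None, :, :], 2, axis=0),
+    #                np.array([[1., 0., 0.], [0., 1., 0.]]))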
+
diff --git a/pyspedas/cotrans/minvar.py b/pyspedas/cotrans/minvar.py
index 8772c5d5..0f6ea7ae 100644
--- a/pyspedas/cotrans/minvar.py
+++ b/pyspedas/cotrans/minvar.py
@@ -28,12 +28,12 @@ def minvar(data):
# Min var starts here
# data must be Nx3
- vecavg = np.nanmean(data, axis=0)
+ vecavg = np.nanmean(np.nan_to_num(data, nan=0.0), axis=0)
mvamat = np.zeros((3, 3))
for i in range(3):
for j in range(3):
- mvamat[i, j] = np.nanmean(data[:, i] * data[:, j]) - vecavg[i] * vecavg[j]
+ mvamat[i, j] = np.nanmean(np.nan_to_num(data[:, i] * data[:, j], nan=0.0)) - vecavg[i] * vecavg[j]
# Calculate eigenvalues and eigenvectors
w, v = np.linalg.eigh(mvamat, UPLO='U')
@@ -41,6 +41,12 @@ def minvar(data):
# Sorting to ensure descending order
w = np.abs(w)
idx = np.flip(np.argsort(w))
+
+    # IDL compatibility
+    if np.sum(w) == 0.0:
+        idx = [0, 2, 1]
+
w = w[idx]
v = v[:, idx]
@@ -48,13 +54,13 @@ def minvar(data):
YcrossZdotX = v[0, 0] * (v[1, 1] * v[2, 2] - v[2, 1] * v[1, 2])
if YcrossZdotX < 0:
v[:, 1] = -v[:, 1]
+        # v[:, 2] = -v[:, 2]  # shouldn't this one be flipped too, since it is the Z-axis?
# Ensure minvar direction is along +Z (for FAC system)
if v[2, 2] < 0:
v[:, 2] = -v[:, 2]
v[:, 1] = -v[:, 1]
- s = data.shape
vrot = np.array([np.dot(row, v) for row in data])
- return vrot, v, w
\ No newline at end of file
+ return vrot, v, w
diff --git a/pyspedas/cotrans/minvar_matrix_make.py b/pyspedas/cotrans/minvar_matrix_make.py
index 24a0b1fa..ced9a5f4 100644
--- a/pyspedas/cotrans/minvar_matrix_make.py
+++ b/pyspedas/cotrans/minvar_matrix_make.py
@@ -85,10 +85,7 @@ def minvar_matrix_make(in_var_name,
if tslide == 0:
break
- if len(data) > 2:
- o_d = {'x': o_times[:-1], 'y': o_eigs[0:-1, :, :], 'v': data.v}
- else:
- o_d = {'x': o_times[:-1], 'y': o_eigs[0:-1, :, :]}
+ o_d = {'x': o_times[:-1], 'y': o_eigs[0:-1, :, :]}
out_vars = []
diff --git a/pyspedas/cotrans/quaternions.py b/pyspedas/cotrans/quaternions.py
new file mode 100644
index 00000000..55371602
--- /dev/null
+++ b/pyspedas/cotrans/quaternions.py
@@ -0,0 +1,789 @@
+import logging
+from copy import deepcopy
+import numpy as np
+from pyspedas.utilities.interpol import interpol
+
+
+def qmult(q1, q2):
+ """
+ multiply two quaternions or two arrays of quaternions
+
+ Parameters
+ ----------
+ q1 : array_like
+ a 4 element array, or an Nx4 element array, representing quaternion(s)
+ q2 : array_like
+ a 4 element array, or an Nx4 element array, representing quaternion(s)
+
+ Returns
+ -------
+ q1*q2, or -1 on failure
+
+ Notes
+ -----
+ Implementation largely copied from the euve c library for
+ quaternions
+    Representation has q[0] = scalar component
+         q[1] = vector x
+         q[2] = vector y
+         q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ q1i = np.array(q1)
+ q2i = np.array(q2)
+
+ if q1i.ndim != q2i.ndim:
+ logging.error('Number of dimensions in quaternion q1 and quaternion q2 do not match')
+ return -1
+
+ # check to make sure input has the correct dimensions
+ q1i = qvalidate(q1i, 'q1', 'qmult')
+ q2i = qvalidate(q2i, 'q2', 'qmult')
+
+ if isinstance(q1i, int):
+ return q1i
+
+ if isinstance(q2i, int):
+ return q2i
+
+ # make sure elements match
+ if q1i.size != q2i.size:
+ logging.error('Number of elements in quaternion q1 and quaternion q2 do not match')
+ return -1
+
+ # now the actual dirty work
+ qtmp0 = q1i[:, 0] * q2i[:, 0] - q1i[:, 1] * q2i[:, 1] - q1i[:, 2] * q2i[:, 2] - q1i[:, 3] * q2i[:, 3]
+ qtmp1 = q1i[:, 1] * q2i[:, 0] + q1i[:, 0] * q2i[:, 1] - q1i[:, 3] * q2i[:, 2] + q1i[:, 2] * q2i[:, 3]
+ qtmp2 = q1i[:, 2] * q2i[:, 0] + q1i[:, 3] * q2i[:, 1] + q1i[:, 0] * q2i[:, 2] - q1i[:, 1] * q2i[:, 3]
+ qtmp3 = q1i[:, 3] * q2i[:, 0] - q1i[:, 2] * q2i[:, 1] + q1i[:, 1] * q2i[:, 2] + q1i[:, 0] * q2i[:, 3]
+
+ qout = np.array([qtmp0, qtmp1, qtmp2, qtmp3]).T
+
+ return qout
+
+
+def qdecompose(q):
+ """
+    Purpose: decompose quaternions into axes and angles
+ Inputs: q: a 4 element quaternion or an Nx4 element array of quaternions
+ Returns: a 4 element array with a[0] = angle, and a[1:3] = axis, or
+ an Nx4 element array or -1L on failure
+ Notes: Implementation largely copied from the euve c library for
+ quaternions
+    Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+    As per the euve implementation, if q[0] is outside of the range of
+    acos...[-1,1] the value of the quaternion will be turned into an
+    identity quaternion...in other words clipped; this seems suspect,
+    and a better solution may be to wrap the value back into range using
+    modular arithmetic; future modifiers of this routine should consider
+    adding this.
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ EPSILON = 1.0e-20 # Where sin(theta) is close enough to theta
+ # this is to avoid mutating the input variable
+ qi = q
+ # check to make sure input has the correct dimensions
+ qi = qvalidate(qi, 'q', 'qdecompose')
+ if isinstance(qi, int):
+ return qi
+ qdims = qi.shape
+ aout = np.zeros(qdims, dtype=np.float64)
+
+ # the following code will clip into range
+ idx = np.argwhere(qi[:, 0] >= 1.0).flatten()
+ if len(idx) != 0:
+ aout[idx, 0] = 0.0
+ aout[idx, 1] = 1.0
+        aout[idx, 2:4] = 0.0
+
+ idx = np.argwhere(qi[:, 0] <= -1.0).flatten()
+ if len(idx) != 0:
+ aout[idx, 0] = 2 * np.pi
+ aout[idx, 1] = 1.0
+        aout[idx, 2:4] = 0.0
+
+ idx = np.argwhere((qi[:, 0] > -1.0) & (qi[:, 0] < 1.0)).flatten()
+ if len(idx) != 0:
+ theta2 = np.arccos(qi[idx, 0])
+ aout[idx, 0] = 2 * theta2
+
+ idx2 = np.argwhere(theta2 < EPSILON).flatten()
+ if len(idx2) != 0:
+ aout[idx[idx2], 1] = 1.0
+            aout[idx[idx2], 2:4] = 0.0
+
+ idx2 = np.argwhere(theta2 >= EPSILON).flatten()
+ if len(idx2) != 0:
+ aout[idx[idx2], 1] = qi[idx[idx2], 1] / np.sin(theta2[idx2])
+ aout[idx[idx2], 2] = qi[idx[idx2], 2] / np.sin(theta2[idx2])
+ aout[idx[idx2], 3] = qi[idx[idx2], 3] / np.sin(theta2[idx2])
+
+ return aout.reshape(qdims)
+
+
+def qvalidate(q, argname, fxname):
+ """
+ Purpose: validate inputs for the idl quaternion library
+
+ Inputs: q: a 4 element array, or an Nx4 element array, representing quaternion(s)
+ argname: the name of the argument to be used in error messages
+
+ Returns: an Nx4 array or -1, it will turn 4 element quaternion arrays
+ into 1x4 element quaternion arrays
+
+ Notes: This function is here because I noticed a lot of the error
+ checking code was being repeated, and it was making the functions
+ long and hard to read
+
+    Representation has q[0] = scalar component
+    q[1] = vector x
+    q[2] = vector y
+    q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ # this is to avoid mutating the input variable
+ qi = deepcopy(q)
+
+ if isinstance(qi, int):
+ return -1
+
+ # check to make sure input has the correct dimensions
+ elif np.size(np.shape(qi)) == 1:
+ if np.size(qi) != 4:
+ logging.error('Wrong number of elements in quaternion ' + argname + '. Found when validating input for ' + fxname)
+ return -1
+ qi = np.reshape(qi, (1, 4))
+ elif np.size(np.shape(qi)) == 2:
+ s = np.shape(qi)
+ if s[np.size(s)-1] != 4:
+ logging.error('Dimension 2 of quaternion ' +argname+' must have 4 elements. Found when validating input for ' + fxname)
+ return -1
+ else:
+ logging.error('Quaternion '+argname+' has the wrong number of dimensions. Found when validating input for ' + fxname)
+ return -1
+
+ return qi
+
+
+def qconj(q):
+ """
+ Purpose: calculate the conjugate a quaternion or an array of quaternions
+
+ Inputs: q: a 4 element array, or an Nx4 element array, representing quaternion(s)
+
+ Returns: q*
+
+ Notes: Implementation largely copied from the euve c library for
+ quaternions
+    Representation has q[0] = scalar component
+    q[1] = vector x
+    q[2] = vector y
+    q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ # this is to avoid mutating the input variable
+ qi = deepcopy(q)
+
+ # check to make sure input has the correct dimensions
+ qi = qvalidate(qi, 'q', 'qconj')
+
+ if isinstance(qi, int):
+ return qi
+
+ # the actual conjugation
+ qtmp0 = qi[:, 0]
+ qtmp1 = -qi[:, 1]
+ qtmp2 = -qi[:, 2]
+ qtmp3 = -qi[:, 3]
+
+ qout = np.array([qtmp0, qtmp1, qtmp2, qtmp3]).T
+
+ if len(q.shape) == 1:
+ qout = qout.flatten()
+
+ return qout
+
+
+def qslerp(q, x1, x2, geometric=False, eq_tolerance=1e-12):
+ """
+ Uses spherical linear interpolation to interpolate quaternions between elements of q
+
+ Parameters
+ ----------
+ q : array_like
+        An Nx4 element array, representing a list of quaternions with N > 1; all quaternions must be unit quaternions (i.e., length/norm = 1)
+ x1 : array_like
+        The input abscissa values of the quaternions, an array of length N; abscissa values must also be monotonic
+ x2 : array_like
+        The output abscissa values for the quaternions; can have any number of elements but must fall within the interval [x1[0], x1[N-1]]; an M element array whose abscissa values must also be monotonic
+ geometric : bool, optional
+        This keyword allows you to specify that the geometric formula for the slerp be used. The default formula is probably faster and more numerically stable; the geometric option is just available for testing
+        Testing of the geometric method indicates that the norm of the interpolated quaternions strays easily from unit length; when it renormalizes, the results may be destabilized
+ eq_tolerance : float, optional
+ Set to specify the tolerance used when determining whether two numbers are equal (default: 1e-12). This tolerance will be used in checking equivalence of:
+ -quaternion lengths
+ -input vs. output abscissae
+ -quaternion direction (inner product)
+
+ Returns
+ -------
+ q_out : array_like
+ An Mx4 element array of interpolated quaternions or -1L on failure
+
+ Notes
+ -----
+    Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as an eigenvector of the rotation the quaternion performs
+
+ The scalar component can be thought of as the amount of rotation that the quaternion performs
+
+ While the code may seem a little esoteric, it is vectorized and provides the most accurate results it can get
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ qi = deepcopy(q)
+ x2i = deepcopy(x2)
+ x1i = deepcopy(x1)
+
+ # check that quaternions are consistent with generic quaternion invariants
+ qi = qvalidate(qi,'qi','qslerp')
+
+ if isinstance(qi, int):
+ return qi
+
+ # check that input quaternions are unit length
+ qn = qnorm(qi)
+
+ idx = np.argwhere(np.abs(qn - 1.0) > eq_tolerance).flatten()
+ if len(idx) > 0:
+ logging.error('At least one input quaternion is not unit length')
+ return
+
+ if qi.shape[0] != len(x1i):
+ logging.error('Number of input abscissa values does not match the number of input quaternions')
+ return
+
+ # check that input abscissa values are monotonic
+ if len(x1i) > 1:
+ idx = np.argwhere((x1i[1:len(x1i)]-x1i[0:len(x1i)-1]) < 0)
+ if len(idx) > 0:
+ logging.error('input abscissa values not monotonic')
+ return
+
+ # check that output abscissa values are strictly monotonic
+ if len(x2i) > 1:
+ idx = np.argwhere((x2i[1:len(x2i)]-x2i[0:len(x2i)-1]) < 0)
+ if len(idx) > 0:
+ logging.error('output abscissa values not monotonic')
+ return
+
+ # construct the output array
+ q_out = np.zeros((len(x2i), 4))
+
+ # if output abscissa values are outside of the range of input abscissa
+ # values constant extrapolation is used
+ idx = np.argwhere(x2i < x1i[0]).flatten()
+
+ if len(idx) > 0:
+ q_out[idx, :] = np.array(idx.size*[qi[0, :]])
+
+ idx = np.argwhere(x2i > x1i[-1]).flatten()
+
+ if len(idx) > 0:
+ q_out[idx, :] = np.array(idx.size*[qi[-1, :]])
+
+ out_idx = np.argwhere((x2i >= x1i[0]) & (x2i <= x1i[-1])).flatten()
+
+ if len(out_idx) == 0:
+ return q_out.reshape((-1, 4))
+
+ x2i = x2i[out_idx]
+
+ # construct arguments to the slerp function, this includes the source
+ # quaternion list, the target quaternions list, and the proportion of
+ # interpolation list for each quaternion pair. They should all have
+ # the same number of elements as the output abscissa value list
+
+ t_temp = interpol(np.arange(qi.shape[0], dtype='float64'), x1i, x2i)
+
+ t_list = t_temp % 1.0
+
+ q_idx = np.int64(np.floor(t_temp))
+
+    # if the last abscissa values are identical, the indexing scheme to
+    # generate the q_list could generate an overflow; the two conditionals
+    # below prevent this
+ idx = np.argwhere(np.abs(t_list) <= eq_tolerance).flatten() # where t_list =~ 0.0
+ if len(idx) > 0:
+ q_out[out_idx[idx], :] = qi[q_idx[idx], :]
+
+ slerp_idx = np.argwhere(np.abs(t_list) > eq_tolerance).flatten() # where t_list !=~ 0.0
+
+ # if there is nothing left, then we're done
+ if slerp_idx.size == 0:
+ return q_out.reshape((-1, 4))
+
+ q_idx = q_idx[slerp_idx]
+ out_idx = out_idx[slerp_idx]
+ t_list = t_list[slerp_idx]
+
+ q1_list = qi[q_idx, :]
+
+ q2_list = qi[q_idx + 1, :]
+
+    # calculate the dot product, which is needed to flip the
+ # appropriate quaternions to guarantee interpolation is done along the
+ # shortest path
+ dotp = qdotp(q1_list, q2_list)
+
+ if dotp.ndim == 0 and dotp == -1:
+ return -1
+
+ # the following code flips quaternions in q2_list to ensure the
+ # shortest path is followed
+ idx = np.argwhere(dotp < 0.0).flatten()
+
+ if idx.size != 0:
+ q2_list[idx, :] = -q2_list[idx, :]
+
+ # interpolation cannot be performed on colinear quaternions
+ # it is assumed that colinear quaternions will be returned unchanged
+ # since dotp(q1,q2) = cos(angle between q1,q2) if dotp = 1.0 the
+ # quaternions are colinear
+ idx = np.argwhere(np.abs(dotp - 1.0) <= eq_tolerance).flatten() # where dotp = 1.0
+
+ # store colinear quaternions into output array
+ if idx.size != 0:
+ q_out[out_idx[idx], :] = q1_list[idx, :]
+
+ # copy non-colinear quaternions for processing
+ idx = np.argwhere(np.abs(dotp - 1.0) > eq_tolerance).flatten()
+
+ if idx.size == 0:
+ return q_out.reshape((-1, 4)) # if no non-colinear quaternions are left, we are done
+
+ dotp = dotp[idx]
+ t_list = t_list[idx]
+ q1_list = q1_list[idx, :]
+ q2_list = q2_list[idx, :]
+ out_idx = out_idx[idx]
+
+ # now the actual processing begins
+
+ # testing both methods to verify results
+ if geometric:
+ theta = np.arccos(dotp)
+
+ sin_theta = np.sin(theta)
+
+ theta_t = theta * t_list
+
+ co1 = np.sin(theta - theta_t) / sin_theta
+ co2 = np.sin(theta_t) / sin_theta
+
+ q_out[out_idx, 0] = co1 * q1_list[:, 0] + co2 * q2_list[:, 0]
+ q_out[out_idx, 1] = co1 * q1_list[:, 1] + co2 * q2_list[:, 1]
+ q_out[out_idx, 2] = co1 * q1_list[:, 2] + co2 * q2_list[:, 2]
+ q_out[out_idx, 3] = co1 * q1_list[:, 3] + co2 * q2_list[:, 3]
+ else:
+ # slerp will be performed by calculating:
+ # ((q2*(q1^-1))^t)*q1
+ # since the quaternions are unit q1^-1 = conjugate(q1)
+ # exponentiation can be calculated by transforming to
+ # polar form cos(theta*t)+v*sin(theta*t)
+ # theta = acos(q[0])
+ # NOTE: this potentially more numerically stable implementation needs
+ # to be verified by comparison to the geometric slerp
+ q1_conj = qconj(q1_list)
+
+ q2_q1_prod = qdecompose(qmult(q2_list, q1_conj))
+
+ if isinstance(q2_q1_prod, int):
+ return -1
+
+ # sometimes a dimension disappears.
+ if q2_q1_prod.ndim == 1 and q2_q1_prod.size == 4:
+ q2_q1_prod = q2_q1_prod.reshape((1, 4))
+
+ theta_scale = q2_q1_prod[:, 0] * t_list
+
+ q_total = qmult(qcompose(q2_q1_prod[:, 1:4], theta_scale), q1_list)
+
+ if isinstance(q_total, int):
+ return -1
+
+ q_out[out_idx, :] = q_total
+
+ return qnormalize(q_out)
+
+
+def qdotp(q1, q2):
+ """
+ Purpose: calculate the dot product of two quaternions or two arrays of quaternions
+
+ Inputs: q1: a 4 element array, or an Nx4 element array, representing quaternion(s)
+ q2: a 4 element array, or an Nx4 element array, representing quaternion(s)
+
+ Returns: q1.q2, or -1 on failure
+
+ Notes:
+    Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+
+ The scalar component can be thought of as the amount of rotation that
+ the quaternion performs
+
+    as with any vectors, if t is the angle between q1 and q2 in 4-space,
+    then q1.q2 = ||q1||*||q2||*cos(t), where || || denotes the norm (length)
+    of the quaternion in 4-space
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ q1i = deepcopy(q1)
+ q2i = deepcopy(q2)
+ qout = np.nansum(q1i*q2i, axis=1)
+ return qout
+
+
+def qnorm(q):
+ """
+ Purpose: calculate the norm a quaternion or an array of quaternions
+
+ Inputs: q: a 4 element array, or an Nx4 element array, representing quaternion(s)
+
+ Returns: norm(q): sqrt(a^2+b^2+c^2+d^2) or -1L on fail
+ will be a single element or an N length array
+
+ Notes: Implementation largely copied from the euve c library for
+ quaternions
+    Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+
+    Note: this implementation does apply the square root, so the
+    conventional Euclidean norm (length) is returned
+
+ This routine is based on the IDL version by Patrick Cruce
+
+ """
+ qi = deepcopy(q)
+ dotp = qdotp(qi, qi)
+ return np.sqrt(dotp)
+
+
+def qnormalize(q):
+ """
+ Purpose: normalize a quaternion or an array of quaternions
+
+ Inputs: q: a 4 element array, or an Nx4 element array, representing quaternion(s)
+
+    Returns: q/norm(q) or -1L on fail
+
+ Notes: Implementation largely copied from the euve c library for
+ quaternions
+    Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ qi = deepcopy(q)
+ qn = qnorm(qi)
+
+ qtmp0 = qi[:, 0]/qn
+ qtmp1 = qi[:, 1]/qn
+ qtmp2 = qi[:, 2]/qn
+ qtmp3 = qi[:, 3]/qn
+
+ qout = np.array([qtmp0, qtmp1, qtmp2, qtmp3]).transpose()
+
+ idx = np.argwhere(qout[:, 0] > 1.0).flatten()
+ if len(idx) > 0:
+ qout[idx, 0] = 1.0
+ qout[idx, 1:4] = 0.0
+
+ idx = np.argwhere(qout[:, 0] < -1.0).flatten()
+ if len(idx) > 0:
+ qout[idx, 0] = -1.0
+ qout[idx, 1:4] = 0.0
+
+ return qout
+
+
+def mtoq(m):
+ """
+ Function: mtoq
+
+ Purpose: transforms a rotation matrix into a quaternion. If the
+ matrix does not perform a rotation, then its behavior may be ill-
+ defined
+
+    WARNING!!!! - this routine does not conform to the Wikipedia definition; see the warning for qtom below
+
+ Inputs: m: a 3x3 element array or an Nx3x3 element array
+
+ Returns: q
+
+ Notes: Implementation largely copied from the euve c library for
+ quaternions
+    Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+
+    The vector component of the quaternion can also be thought of as
+    an eigenvector of the rotation the quaternion performs
+
+ This routine is based on the IDL version by Patrick Cruce
+ """
+ mi = deepcopy(m)
+
+ dims = np.shape(mi)
+
+ if len(dims) == 2:
+ if dims[0] != 3 or dims[1] != 3:
+ logging.error('Wrong dimensions in input matrix')
+ return -1
+
+ mi = np.reshape(m, (1, 3, 3))
+
+        dims = (1, 3, 3)
+
+ elif len(dims) == 3:
+ if dims[1] != 3 or dims[2] != 3:
+ logging.error('Wrong dimensions in input matrix')
+ return -1
+ else:
+ logging.error('Wrong dimensions in input matrix')
+ return -1
+
+ qout = np.zeros((dims[0], 4))
+
+ arg = 1.0 + mi[:, 0, 0] + mi[:, 1, 1] + mi[:, 2, 2]
+
+ idx = np.argwhere(arg < 0.0)
+
+ if len(idx) != 0:
+ arg[idx] = 0.0
+
+ qout[:, 0] = 0.5 * np.sqrt(arg)
+
+ arg = 1.0 + mi[:, 0, 0] - mi[:, 1, 1] - mi[:, 2, 2]
+
+ idx = np.argwhere(arg < 0.0)
+
+ if len(idx) != 0:
+ arg[idx] = 0.0
+
+ qout[:, 1] = 0.5 * np.sqrt(arg)
+
+ arg = 1.0 - mi[:, 0, 0] + mi[:, 1, 1] - mi[:, 2, 2]
+
+ idx = np.argwhere(arg < 0.0)
+
+ if len(idx) != 0:
+ arg[idx] = 0.0
+
+ qout[:, 2] = 0.5 * np.sqrt(arg)
+
+ arg = 1.0 - mi[:, 0, 0] - mi[:, 1, 1] + mi[:, 2, 2]
+
+ idx = np.argwhere(arg < 0.0)
+
+ if len(idx) != 0:
+ arg[idx] = 0.0
+
+ qout[:, 3] = 0.5 * np.sqrt(arg)
+
+ imax = np.zeros(dims[0], dtype=int)
+ dmax = np.zeros(dims[0])
+
+ for i in range(4):
+ idx = np.argwhere(np.abs(qout[:, i]) > dmax)
+ if len(idx) != 0:
+ imax[idx] = i
+ dmax[idx] = qout[idx, i]
+
+ idx = np.argwhere(imax == 0)
+
+ if len(idx) != 0:
+ qout[idx, 1] = (mi[idx, 2, 1] - mi[idx, 1, 2]) / (4 * qout[idx, 0])
+ qout[idx, 2] = (mi[idx, 0, 2] - mi[idx, 2, 0]) / (4 * qout[idx, 0])
+ qout[idx, 3] = (mi[idx, 1, 0] - mi[idx, 0, 1]) / (4 * qout[idx, 0])
+
+ idx = np.argwhere(imax == 1)
+
+ if len(idx) != 0:
+ qout[idx, 2] = (mi[idx, 1, 0] + mi[idx, 0, 1]) / (4 * qout[idx, 1])
+ qout[idx, 3] = (mi[idx, 2, 0] + mi[idx, 0, 2]) / (4 * qout[idx, 1])
+ qout[idx, 0] = (mi[idx, 2, 1] - mi[idx, 1, 2]) / (4 * qout[idx, 1])
+
+ idx = np.argwhere(imax == 2)
+
+ if len(idx) != 0:
+ qout[idx, 3] = (mi[idx, 2, 1] + mi[idx, 1, 2]) / (4 * qout[idx, 2])
+ qout[idx, 0] = (mi[idx, 0, 2] - mi[idx, 2, 0]) / (4 * qout[idx, 2])
+ qout[idx, 1] = (mi[idx, 1, 0] + mi[idx, 0, 1]) / (4 * qout[idx, 2])
+
+ idx = np.argwhere(imax == 3)
+
+ if len(idx) != 0:
+ qout[idx, 0] = (mi[idx, 1, 0] - mi[idx, 0, 1]) / (4 * qout[idx, 3])
+ qout[idx, 1] = (mi[idx, 2, 0] + mi[idx, 0, 2]) / (4 * qout[idx, 3])
+ qout[idx, 2] = (mi[idx, 2, 1] + mi[idx, 1, 2]) / (4 * qout[idx, 3])
+
+ idx = np.argwhere(qout[:, 0] < 0.0)
+
+ if len(idx) != 0:
+ qout[idx, :] = -qout[idx, :]
+
+ qret = qnormalize(qout)
+
+ return np.reshape(qret, (dims[0], 4))
+
+
+def qtom(qi):
+ """
+ Purpose: transforms quaternions into rotation matrices
+ WARNING!!! It appears that this routine returns the transpose (inverse) of the rotation matrix!
+ It differs from the CSPICE library and Wikipedia
+
+ Inputs: a 4 element array representing a quaternion or an Nx4 element
+ array representing an array of quaternions
+
+ Returns: a 3x3 matrix or an Nx3x3 array
+
+ Notes: Implementation largely copied from the EUVE C library for
+ quaternions
+ Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+
+ The vector component of the quaternion lies along the rotation
+ axis; it is an eigenvector of the rotation the quaternion performs
+
+ This routine is based on the IDL version by Patrick Cruce
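+
+ Example (illustrative): the identity quaternion maps to the
+ identity matrix:
+ >>> import numpy as np
+ >>> m = qtom(np.array([[1.0, 0.0, 0.0, 0.0]]))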
+ """
+
+ if isinstance(qi, int):
+ return -1
+
+ e00 = qi[:, 0] * qi[:, 0]
+ e11 = qi[:, 1] * qi[:, 1]
+ e22 = qi[:, 2] * qi[:, 2]
+ e33 = qi[:, 3] * qi[:, 3]
+ e01 = 2 * qi[:, 0] * qi[:, 1]
+ e02 = 2 * qi[:, 0] * qi[:, 2]
+ e03 = 2 * qi[:, 0] * qi[:, 3]
+ e12 = 2 * qi[:, 1] * qi[:, 2]
+ e13 = 2 * qi[:, 1] * qi[:, 3]
+ e23 = 2 * qi[:, 2] * qi[:, 3]
+
+ mout = np.zeros((len(e00), 3, 3))
+
+ mout[:, 0, 0] = e00 + e11 - e22 - e33
+ mout[:, 1, 0] = e12 + e03
+ mout[:, 2, 0] = e13 - e02
+ mout[:, 0, 1] = e12 - e03
+ mout[:, 1, 1] = e00 - e11 + e22 - e33
+ mout[:, 2, 1] = e23 + e01
+ mout[:, 1, 2] = e23 - e01
+ mout[:, 0, 2] = e13 + e02
+ mout[:, 2, 2] = e00 - e11 - e22 + e33
+
+ return mout
+
+
+def qcompose(vec, theta, free=True):
+ """
+ Purpose: compose quaternions from vectors and angles
+ Inputs: vec: 3 element array or an Nx3 element array
+ theta: an angle or an N element array of angles(in radians)
+ Keywords: free: Flag to allow thetas outside [0,pi)
+ Returns: a 4 element quaternion or an Nx4 element array of quaternions
+ Notes: Implementation largely copied from the EUVE C library for
+ quaternions
+ Representation has q[0] = scalar component
+ q[1] = vector x
+ q[2] = vector y
+ q[3] = vector z
+ The vector component of the quaternion lies along the rotation
+ axis; it is an eigenvector of the rotation the quaternion performs
+
+ This routine is based on the IDL version by Patrick Cruce
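+
+ Example (illustrative): a rotation of pi/2 radians about the z axis
+ gives approximately [0.7071, 0, 0, 0.7071]:
+ >>> import numpy as np
+ >>> q = qcompose(np.array([[0.0, 0.0, 1.0]]), np.array([np.pi / 2]))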
+ """
+
+ # Vectors with a norm smaller than this threshold are treated as zero
+ epsilon = 1.0e-20
+
+ vi = deepcopy(vec)
+ thi = deepcopy(theta)
+
+ # this next block of code moves angles into the range [0,PI)
+ if not free:
+ thi = thi % np.pi
+
+ idx = np.argwhere(thi < 0)
+
+ if len(idx) > 0:
+ thi[idx] += np.pi
+
+ # calculate the vector norm
+ norm = np.sqrt(np.nansum(vi*vi, axis=1))
+
+ # decide which quaternions become identity vectors
+ idx1 = np.argwhere(norm < epsilon).flatten()
+ idx2 = np.argwhere(norm >= epsilon).flatten()
+
+ out_arr = np.zeros((len(norm), 4))
+
+ if len(idx1) > 0:
+ out_arr[idx1, 0] = 1.0
+ out_arr[idx1, 1:4] = 0.0
+
+ if len(idx2) > 0:
+ out_arr[idx2, 0] = np.cos(thi[idx2]/2.0)
+
+ stheta2 = np.sin(thi[idx2]/2.0)
+
+ out_arr[idx2, 1] = (stheta2 * vi[idx2, 0])/norm[idx2]
+ out_arr[idx2, 2] = (stheta2 * vi[idx2, 1])/norm[idx2]
+ out_arr[idx2, 3] = (stheta2 * vi[idx2, 2])/norm[idx2]
+
+ return out_arr
diff --git a/pyspedas/cotrans/tests/cotrans.py b/pyspedas/cotrans/tests/cotrans.py
index d285ae2a..2c51e690 100644
--- a/pyspedas/cotrans/tests/cotrans.py
+++ b/pyspedas/cotrans/tests/cotrans.py
@@ -11,38 +11,94 @@
"""
import unittest
import pyspedas
+import logging
from pyspedas.themis.cotrans.dsl2gse import dsl2gse
from pyspedas.cotrans.cotrans import cotrans
-from pyspedas.cotrans.cotrans_get_coord import cotrans_get_coord
-from pyspedas.cotrans.cotrans_set_coord import cotrans_set_coord
+from pyspedas.cotrans.fac_matrix_make import fac_matrix_make
from pytplot import get_data, store_data, del_data
+from pyspedas import cotrans_get_coord, cotrans_set_coord
class CotransTestCases(unittest.TestCase):
"""Tests for cotrans."""
+ def test_fac_matrix_make(self):
+ doesntexist = fac_matrix_make('doesnt_exist')
- def test_get_set_coord(self):
- """ Test for cotrans_set_coord/cotrans_get_coord """
+ def test_get_set_coord_wrappers(self):
+ """ Test for cotrans_set_coord/cotrans_get_coord wrappers """
+ del_data()
doesntexist = cotrans_get_coord('test_coord')
- self.assertTrue(doesntexist == None)
+ self.assertTrue(doesntexist is None)
store_data('test_coord', data={'x': [1, 2, 3, 4, 5], 'y': [1, 1, 1, 1, 1]})
+ cotrans(name_in='test_coord', coord_out="geo")
before = cotrans_get_coord('test_coord')
- self.assertTrue(before == None)
+ self.assertTrue(before is None)
setcoord = cotrans_set_coord('test_coord', 'GSE')
self.assertTrue(setcoord)
after = cotrans_get_coord('test_coord')
self.assertTrue(after == 'GSE')
+ md = get_data('test_coord', metadata=True)
+ md['data_att']['units'] = 'km'
setcoord = cotrans_set_coord('test_coord', 'GSM')
self.assertTrue(setcoord)
+ md_after = get_data('test_coord', metadata=True)
after = cotrans_get_coord('test_coord')
self.assertTrue(after == 'GSM')
+ self.assertTrue(md_after['data_att']['units'] == 'km')
+ setcoord = cotrans_set_coord('doesnt_exist', 'GSM')
+
+ def test_get_set_coords(self):
+ """ Test for pytplot.set_coords/get_coords """
+ from pytplot import set_coords, get_coords
+ del_data()
+ doesntexist = get_coords('test_coord')
+ self.assertTrue(doesntexist is None)
+ store_data('test_coord', data={'x': [1, 2, 3, 4, 5], 'y': [1, 1, 1, 1, 1]})
+ cotrans(name_in='test_coord', coord_out="geo")
+ before = get_coords('test_coord')
+ self.assertTrue(before is None)
+ setcoord = set_coords('test_coord', 'GSE')
+ self.assertTrue(setcoord)
+ after = get_coords('test_coord')
+ self.assertTrue(after == 'GSE')
+ md = get_data('test_coord', metadata=True)
+ md['data_att']['units'] = 'km'
+ setcoord = set_coords('test_coord', 'GSM')
+ self.assertTrue(setcoord)
+ md_after = get_data('test_coord', metadata=True)
+ after = get_coords('test_coord')
+ self.assertTrue(after == 'GSM')
+ self.assertTrue(md_after['data_att']['units'] == 'km')
+ setcoord = set_coords('doesnt_exist', 'GSM')
+
+ def test_get_set_units(self):
+ """ Test for pytplot.set_coords/get_coords """
+ from pytplot import set_units, get_units, set_coords, get_coords
+
+ del_data()
+ doesntexist = get_units('test_units')
+ self.assertTrue(doesntexist is None)
+ store_data('test_units', data={'x': [1, 2, 3, 4, 5], 'y': [1, 1, 1, 1, 1]})
+ before = get_units('test_units')
+ self.assertTrue(before is None)
+ setunits = set_units('test_units', 'Km')
+ self.assertTrue(setunits)
+ after = get_units('test_units')
+ self.assertTrue(after == 'Km')
+ set_coords('test_units', 'GEO')
+ setunits = set_units('test_units', 'mm')
+ self.assertTrue(setunits)
+ coords_after = get_coords('test_units')
+ units_after = get_units('test_units')
+ self.assertTrue(coords_after == 'GEO')
+ self.assertTrue(units_after == 'mm')
def test_dsl2gse(self):
"""Test themis.cotrans.dsl2gse."""
del_data()
# Try with missing variables. It should exit without problems.
- dsl2gse('tha_fgl_dsl', 'tha_spinras', 'tha_spindec', 'tha_fgl_gse')
+ dsl2gse('tha_fgl_dsl','tha_fgl_gse')
# Now load the needed variables.
time_range = ['2017-03-23 00:00:00', '2017-03-23 23:59:59']
pyspedas.themis.state(probe='a', trange=time_range,
@@ -51,12 +107,14 @@ def test_dsl2gse(self):
pyspedas.themis.fgm(probe='a', trange=time_range,
varnames=['tha_fgl_dsl'])
- dsl2gse('tha_fgl_dsl', 'tha_spinras', 'tha_spindec', 'tha_fgl_gse')
+ fac_matrix_make('tha_fgl_dsl')
+
+ dsl2gse('tha_fgl_dsl', 'tha_fgl_gse')
t, d = get_data('tha_fgl_gse')
# Now test the inverse.
- dsl2gse('tha_fgl_dsl', 'tha_spinras', 'tha_spindec', 'tha_fgl_gse',
- isgsetodsl=1)
+ dsl2gse('tha_fgl_dsl', 'tha_fgl_gse',
+ isgsetodsl=True)
self.assertTrue(abs(d[0].tolist()[0]-15.905078404701147) <= 1e-6)
self.assertTrue(abs(d[0].tolist()[1]--13.962618931740064) <= 1e-6)
@@ -84,6 +142,21 @@ def test_cotrans(self):
cotrans(name_in=name_in, coord_in="gei", coord_out="geo")
self.assertTrue(out_len == in_len)
+ def test_cotrans_coord_mismatch(self):
+ """Test that cotrans rejects a request where in_coord does not match the system from the variable metadata."""
+ del_data()
+ trange = ['2010-02-25/00:00:00', '2010-02-25/23:59:59']
+ probe = 'a'
+ name_in = "tha_pos"
+ name_out = "tha_pos_new_geo"
+ pyspedas.themis.state(probe=probe, trange=trange,
+ time_clip=True, varnames=[name_in])
+ # Metadata coordinate system is GEI, but requesting GSM->GEO transform. This should generate an error message
+ # and return failure.
+ result = cotrans(name_in=name_in, name_out=name_out,
+ coord_in="gsm", coord_out="geo")
+ self.assertTrue(result == 0)
+
def test_cotrans_igrf(self):
"""Test GSE->GSM and IGRF."""
del_data()
@@ -160,9 +233,14 @@ def test_all_cotrans(self):
name1 = "name1"
name2 = "name2"
count = 0
- # Test non-existent system.
- cotrans(name_out=name1, time_in=t, data_in=d,
- coord_in="coord_in", coord_out="coord_out")
+ # Test non-existent systems.
+ result = cotrans(name_out=name1, time_in=t, data_in=d,
+ coord_in="badcoord", coord_out="gei")
+ self.assertTrue(result == 0)
+ result = cotrans(name_out=name1, time_in=t, data_in=d,
+ coord_in="gei", coord_out="badcoord")
+ self.assertTrue(result == 0)
+
# Test empty data.
cotrans(name_out=name1, time_in=t, data_in=[],
coord_in="gei", coord_out="geo")
@@ -183,7 +261,7 @@ def test_all_cotrans(self):
dout2 = get_data(name2)
out_len2 = len(dout2[0])
dd2 = dout2[1][1]
- print(count, "--- in:", coord_in, "out:", coord_out)
+ logging.info("%d --- in: %s out: %s", count, coord_in, coord_out)
# print(dout[1][1])
# print(dd2)
self.assertTrue(out_len2 == in_len)
diff --git a/pyspedas/cotrans/tests/quaternions.py b/pyspedas/cotrans/tests/quaternions.py
new file mode 100644
index 00000000..7e0454bc
--- /dev/null
+++ b/pyspedas/cotrans/tests/quaternions.py
@@ -0,0 +1,32 @@
+import unittest
+import numpy as np
+from pyspedas.cotrans.quaternions import qslerp, qcompose, qconj, mtoq, qtom, qvalidate, qmult, qdecompose
+
+
+class Qtests(unittest.TestCase):
+ def test_qslerp(self):
+ m1 = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
+ m2 = np.array([[-1, 0, 0], [0, -1, 0], [0, 0, 1]])
+ qin = np.transpose(np.array([mtoq(m1), mtoq(m2)])).reshape([4, 2]).transpose()
+ x1 = np.array([0.0, 1.0])
+ x2 = np.array([0.0, 1.0/6.0, 1.0/3.0, 1.0/2.0, 2.0/3.0, 5.0/6.0, 1.0])
+ qout = qslerp(qin, x1, x2)
+ qout_geo = qslerp(qin, x1, x2, geometric=True)
+ mout = qtom(qout)
+ qcomp = qcompose(np.array([[1, 2, 3]]), np.array([4]), free=False)
+ self.assertTrue(np.abs(np.sum(qout - qout_geo)) < 1e-6)
+
+ def test_errors(self):
+ qs1 = np.ones((7, 5))
+ qs2 = np.ones((8, 4))
+ qs3 = np.ones(4)
+ self.assertTrue(qvalidate(qs1, 'qs1', 'qslerp') == -1)
+ self.assertTrue(qmult(qs1, qs2) == -1)
+ self.assertTrue(qdecompose(np.array(1)) == -1)
+ self.assertTrue(qdecompose(1) == -1)
+ self.assertTrue(qconj(1) == -1)
+ self.assertTrue(qslerp(qs1, qs1, qs1) == -1)
+
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/pyspedas/cotrans/tests/test_minvar.py b/pyspedas/cotrans/tests/test_minvar.py
new file mode 100644
index 00000000..7d4c1c61
--- /dev/null
+++ b/pyspedas/cotrans/tests/test_minvar.py
@@ -0,0 +1,67 @@
+"""
+Unit Tests for minvar function.
+"""
+from pyspedas.cotrans.minvar import minvar
+import numpy as np
+import unittest
+
+
+class TestMinvar(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ # Test tolerance
+ cls.tol = 1e-10
+
+ # Define a random data array
+ rng = np.random.default_rng(seed=31415)
+ cls.rdata = rng.random((10, 3))
+
+ def test_minvar_basic(self):
+ """Test of basic input and output"""
+
+ # Synthetic data of zeros
+ data = np.zeros([2, 3])
+ vrot, v, w = minvar(data)
+ self.assertTrue(np.sum(vrot - data) < self.tol)
+ self.assertTrue(np.sum(v - np.diag(np.ones(3))) < self.tol)
+ self.assertTrue(np.sum(w - np.zeros(3)) < self.tol)
+
+ def test_minvar_rotation(self):
+ """Test of the rotation matrix"""
+ vrot, v, w = minvar(self.rdata)
+ # Determinant of rotation matrix should be = 1
+ self.assertTrue((np.linalg.det(v) - 1) < self.tol)
+
+ def test_minvar_total(self):
+ """Test of same square root of total of squares """
+ vrot, v, w = minvar(self.rdata)
+ total1 = (self.rdata**2).sum(axis=1)
+ total2 = (vrot ** 2).sum(axis=1)
+ # Compare totals
+ self.assertTrue(np.sum(total1 - total2) < self.tol)
+
+ def test_minvar_code_coverage(self):
+ """Test to cover the code from IDL"""
+ data = np.array([[0, 0, 1], [0, 0, 1]])
+ vrot, v, w = minvar(data)
+ # Case of np.sum(w) == 0.0
+ self.assertTrue(w.sum() < self.tol)
+
+ # This input should produce a non-right-handed system,
+ # triggering the case where YcrossZdotX < 0
+ data = np.array([[0, -1, 1], [-1, -1, 1]])
+ vrot, v, w = minvar(data)
+ YcrossZdotX = v[0, 0] * (v[1, 1] * v[2, 2] - v[2, 1] * v[1, 2])
+ # YcrossZdotX should be positive after the correction
+ self.assertTrue(YcrossZdotX > 0)
+
+ # should trigger the case where v[2, 2] < 0
+ data = np.array([[-0.1, -0.9, 0.5], [-1, 1, -0.9]])
+ vrot, v, w = minvar(data)
+ # v[2, 2] should be positive after the correction
+ self.assertTrue(v[2, 2] > 0)
+
+
+if __name__ == '__main__':
+ unittest.main()
+
diff --git a/pyspedas/cotrans/tvector_rotate.py b/pyspedas/cotrans/tvector_rotate.py
index 5815aeaa..ac78dd73 100644
--- a/pyspedas/cotrans/tvector_rotate.py
+++ b/pyspedas/cotrans/tvector_rotate.py
@@ -1,7 +1,9 @@
-
+import logging
import numpy as np
from pyspedas import tnames, tinterpol
from pytplot import get_data, store_data
+from pyspedas.cotrans.quaternions import qtom, mtoq, qslerp
+from pyspedas.cotrans.matrix_array_lib import ctv_verify_mats, ctv_left_mats, ctv_swap_hands
def tvector_rotate(mat_var_in, vec_var_in, newname=None):
@@ -31,11 +33,11 @@ def tvector_rotate(mat_var_in, vec_var_in, newname=None):
"""
if tnames(mat_var_in) == []:
- print('Transformation requires the matrix variable to be set to a valid tplot variable.')
+ logging.error('Transformation requires the matrix variable to be set to a valid tplot variable.')
return
if tnames(vec_var_in) == []:
- print('Transformation requires the vector variables to be set to a valid tplot variable.')
+ logging.error('Transformation requires the vector variables to be set to a valid tplot variable.')
return
vec_var_in = tnames(vec_var_in)
@@ -47,17 +49,37 @@ def tvector_rotate(mat_var_in, vec_var_in, newname=None):
newname = [newname]
if len(newname) != len(vec_var_in):
- print('Length of newname keyword should match the length of vec_var_in')
+ logging.error('Length of newname keyword should match the length of vec_var_in')
return
out_names = []
- mat_data = get_data(mat_var_in)
-
# loop over the vectors
for vec_var, new_var in zip(vec_var_in, newname):
vec_data = get_data(vec_var)
vec_metadata = get_data(vec_var, metadata=True)
+ mat_data = get_data(mat_var_in)
+ m_d_y = mat_data.y
+
+ if not np.array_equal(vec_data.times, mat_data.times) and len(mat_data.times) != 1:
+ verify_check = ctv_verify_mats(mat_data.y)
+
+ is_left_mat = ctv_left_mats(mat_data.y)
+
+ # left-handed matrices can mess up qslerping
+ if is_left_mat:
+ q_in = mtoq(ctv_swap_hands(mat_data.y))
+ else:
+ q_in = mtoq(mat_data.y)
+
+ # interpolate quaternions
+ q_out = qslerp(q_in, mat_data.times, vec_data.times)
+
+ # turn quaternions back into matrices
+ m_d_y = qtom(q_out)
+
+ if is_left_mat:
+ m_d_y = ctv_swap_hands(m_d_y)
if not np.array_equal(vec_data.times, mat_data.times) and len(mat_data.times) != 1:
print('Interpolating the matrix timestamps to the vector time stamps')
@@ -67,10 +89,10 @@ def tvector_rotate(mat_var_in, vec_var_in, newname=None):
vec_fac = np.zeros((len(vec_data.times), len(vec_data.y[0, :])))
for i in range(0, len(vec_data.times)):
- if mat_data.y.shape[0] == 1: # only a single matrix
- matrix = mat_data.y[0, :, :]
+ if m_d_y.shape[0] == 1: # only a single matrix
+ matrix = m_d_y[0, :, :]
else:
- matrix = mat_data.y[i, :, :]
+ matrix = m_d_y[i, :, :]
vec_fac[i, :] = matrix @ vec_data.y[i, :]
diff --git a/pyspedas/cotrans/xyz_to_polar.py b/pyspedas/cotrans/xyz_to_polar.py
new file mode 100644
index 00000000..2a0b266e
--- /dev/null
+++ b/pyspedas/cotrans/xyz_to_polar.py
@@ -0,0 +1,35 @@
+import numpy as np
+
+
+def xyz_to_polar(data, co_latitude=False):
+ """
+ Convert Cartesian coordinates to polar (spherical) coordinates.
+
+ Parameters
+ ----------
+ data : numpy.ndarray
+ Nx3 array of Cartesian vectors; the columns are the x, y and
+ z components
+ co_latitude : bool
+ If True, return the co-latitude (the angle from the +z axis)
+ instead of the latitude
+
+ Returns
+ -------
+ out : numpy.ndarray
+ Nx3 array; the columns are the radial distance, the latitude
+ (or co-latitude) in degrees, and the azimuthal angle in degrees
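+
+ Examples
+ --------
+ The unit vector along x has r = 1, latitude 0 deg and azimuth 0 deg:
+
+ >>> import numpy as np
+ >>> out = xyz_to_polar(np.array([[1.0, 0.0, 0.0]]))  # -> [[1., 0., 0.]]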
+ """
+ x = data[:, 0]
+ y = data[:, 1]
+ z = data[:, 2]
+ out = np.zeros(data.shape)
+ out[:, 0] = np.sqrt(x**2 + y**2 + z**2)
+ out[:, 1] = np.arccos(z/out[:, 0])*180.0/np.pi
+ out[:, 2] = np.arctan2(y, x)*180.0/np.pi
+ if not co_latitude:
+ out[:, 1] = 90.0 - out[:, 1]
+ return out
diff --git a/pyspedas/csswe/__init__.py b/pyspedas/csswe/__init__.py
index 47803d77..23e8813b 100644
--- a/pyspedas/csswe/__init__.py
+++ b/pyspedas/csswe/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def reptile(trange=['2013-11-5', '2013-11-6'],
datatype='flux',
@@ -66,3 +67,8 @@ def reptile(trange=['2013-11-5', '2013-11-6'],
"""
return load(instrument='reptile', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ out = find_datasets(mission='Smallsats/Cubesats', instrument='csswe', label=label)
+ return out
diff --git a/pyspedas/csswe/load.py b/pyspedas/csswe/load.py
index e4b8b037..20c50824 100644
--- a/pyspedas/csswe/load.py
+++ b/pyspedas/csswe/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/csswe/tests/tests.py b/pyspedas/csswe/tests/tests.py
index c95e4a73..df3ab9b4 100644
--- a/pyspedas/csswe/tests/tests.py
+++ b/pyspedas/csswe/tests/tests.py
@@ -1,18 +1,22 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_load_rep_data(self):
rep_vars = pyspedas.csswe.reptile(time_clip=True)
self.assertTrue(data_exists('E1flux'))
+ def test_load_notplot(self):
+ rep_vars = pyspedas.csswe.reptile(notplot=True)
+ self.assertTrue('E1flux' in rep_vars)
+
def test_downloadonly(self):
files = pyspedas.csswe.reptile(downloadonly=True, trange=['2014-2-15', '2014-2-16'])
self.assertTrue(os.path.exists(files[0]))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/de2/README.md b/pyspedas/de2/README.md
new file mode 100644
index 00000000..56d0a5aa
--- /dev/null
+++ b/pyspedas/de2/README.md
@@ -0,0 +1,95 @@
+
+## Dynamics Explorer 2 (DE2)
+The routines in this module can be used to load data from the Dynamics Explorer 2 (DE2) mission.
+
+### Instruments
+- Magnetometer (MAG)
+- Neutral Atmosphere Composition Spectrometer (NACS)
+- Retarding Potential Analyzer (RPA)
+- Fabry-Pérot Interferometer (FPI)
+- Ion Drift Meter (IDM)
+- Wind and Temperature Spectrometer (WATS)
+- Vector Electric Field Instrument (VEFI)
+- Langmuir Probe Instrument (LANG)
+
+### Examples
+Get started by importing pyspedas and tplot; these are required to load and plot the data:
+
+```python
+import pyspedas
+from pytplot import tplot
+```
+
+#### Magnetometer (MAG)
+
+```python
+mag_vars = pyspedas.de2.mag(trange=['1983-02-16', '1983-02-17'])
+
+tplot(['bx', 'by', 'bz'])
+```
+
+
+#### Neutral Atmosphere Composition Spectrometer (NACS)
+
+```python
+nacs_vars = pyspedas.de2.nacs(trange=['1983-02-16', '1983-02-17'])
+
+tplot(['O_density', 'N_density'])
+```
+
+
+#### Retarding Potential Analyzer (RPA)
+
+```python
+rpa_vars = pyspedas.de2.rpa(trange=['1983-02-16', '1983-02-17'])
+
+tplot(['ionDensity', 'ionTemperature'])
+```
+
+
+#### Fabry-Pérot Interferometer (FPI)
+
+```python
+fpi_vars = pyspedas.de2.fpi(trange=['1983-02-16', '1983-02-17'])
+
+tplot('TnF')
+```
+
+
+#### Ion Drift Meter (IDM)
+
+```python
+idm_vars = pyspedas.de2.idm(trange=['1983-02-16', '1983-02-17'])
+
+tplot(['ionVelocityZ', 'ionVelocityY'])
+```
+
+
+#### Wind and Temperature Spectrometer (WATS)
+
+```python
+wats_vars = pyspedas.de2.wats(trange=['1983-02-16', '1983-02-17'])
+
+tplot(['density', 'Tn'])
+```
+
+
+#### Vector Electric Field Instrument (VEFI)
+
+```python
+vefi_vars = pyspedas.de2.vefi(trange=['1983-02-16', '1983-02-17'])
+
+tplot(['spectA', 'spectB', 'spectC'])
+```
+
+
+#### Langmuir Probe Instrument (LANG)
+
+```python
+lang_vars = pyspedas.de2.lang(trange=['1983-02-16', '1983-02-17'])
+
+tplot(['plasmaDensity', 'electronTemp'])
+```
+
+
+
\ No newline at end of file
diff --git a/pyspedas/de2/__init__.py b/pyspedas/de2/__init__.py
new file mode 100644
index 00000000..19a94f0b
--- /dev/null
+++ b/pyspedas/de2/__init__.py
@@ -0,0 +1,595 @@
+from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
+
+def mag(trange=['1983-02-16', '1983-02-17'],
+ datatype='62ms',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Magnetometer (MAG)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options: '62ms'
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
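+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> mag_vars = pyspedas.de2.mag(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot(['bx', 'by', 'bz'])
+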
+ """
+
+ tvars = load(instrument='mag', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return mag_postprocessing(tvars)
+
+
+def mag_postprocessing(variables):
+ """
+ Placeholder for MAG post-processing
+ """
+ return variables
+
+
+def nacs(trange=['1983-02-01', '1983-02-02'],
+ datatype='neutral1s',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Neutral Atmosphere Composition Spectrometer (NACS)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options: 'neutral1s'
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
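+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> nacs_vars = pyspedas.de2.nacs(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot(['O_density', 'N_density'])
+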
+ """
+ tvars = load(instrument='nacs', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return nacs_postprocessing(tvars)
+
+
+def nacs_postprocessing(variables):
+ """
+ Placeholder for NACS post-processing
+ """
+ return variables
+
+
+def rpa(trange=['1983-02-16', '1983-02-17'],
+ datatype='ion2s',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Retarding Potential Analyzer (RPA)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options: 'ion2s'
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
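+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> rpa_vars = pyspedas.de2.rpa(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot(['ionDensity', 'ionTemperature'])
+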
+ """
+ tvars = load(instrument='rpa', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return rpa_postprocessing(tvars)
+
+
+def rpa_postprocessing(variables):
+ """
+ Placeholder for RPA post-processing
+ """
+ return variables
+
+
+def fpi(trange=['1983-02-16', '1983-02-17'],
+ datatype='8s',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Fabry-Pérot Interferometer (FPI)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ '8s' for 8-second resolution data
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
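+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> fpi_vars = pyspedas.de2.fpi(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot('TnF')
+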
+ """
+ tvars = load(instrument='fpi', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return fpi_postprocessing(tvars)
+
+
+def fpi_postprocessing(variables):
+ """
+ Placeholder for FPI post-processing
+ """
+ return variables
+
+
+def idm(trange=['1983-02-16', '1983-02-17'],
+ datatype='250ms',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Ion Drift Meter (IDM)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options: '250ms'
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
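+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> idm_vars = pyspedas.de2.idm(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot(['ionVelocityZ', 'ionVelocityY'])
+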
+ """
+ tvars = load(instrument='idm', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return idm_postprocessing(tvars)
+
+
+def idm_postprocessing(variables):
+ """
+ Placeholder for IDM post-processing
+ """
+ return variables
+
+
+def wats(trange=['1983-02-16', '1983-02-17'],
+ datatype='2s',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Wind and Temperature Spectrometer (WATS)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ '2s' for 2 second data
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
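+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> wats_vars = pyspedas.de2.wats(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot(['density', 'Tn'])
+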
+ """
+ tvars = load(instrument='wats', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return wats_postprocessing(tvars)
+
+
+def wats_postprocessing(variables):
+ """
+ Placeholder for WATS post-processing
+ """
+ return variables
+
+
+def vefi(trange=['1983-02-16', '1983-02-17'],
+ datatype='ac500ms',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Vector Electric Field Instrument (VEFI)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'ac500ms', 'dca500ms'
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
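+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> vefi_vars = pyspedas.de2.vefi(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot(['spectA', 'spectB', 'spectC'])
+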
+ """
+ tvars = load(instrument='vefi', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return vefi_postprocessing(tvars)
+
+
+def vefi_postprocessing(variables):
+ """
+ Placeholder for VEFI post-processing
+ """
+ return variables
+
+
+def lang(trange=['1983-02-16', '1983-02-17'],
+ datatype='500ms',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Langmuir Probe Instrument (LANG)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD', 'YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options: '500ms'
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
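+ Examples
+ ----------
+ This example mirrors the usage shown in the module README:
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> lang_vars = pyspedas.de2.lang(trange=['1983-02-16', '1983-02-17'])
+ >>> tplot(['plasmaDensity', 'electronTemp'])
+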
+ """
+ tvars = load(instrument='lang', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return lang_postprocessing(tvars)
+
+
+def lang_postprocessing(variables):
+ """
+ Placeholder for LANG post-processing
+ """
+ return variables
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Dynamics Explorer', instrument=instrument, label=label)
diff --git a/pyspedas/de2/config.py b/pyspedas/de2/config.py
new file mode 100644
index 00000000..ef2b881b
--- /dev/null
+++ b/pyspedas/de2/config.py
@@ -0,0 +1,12 @@
+import os
+
+CONFIG = {'local_data_dir': 'de2_data/',
+ 'remote_data_dir': 'https://spdf.gsfc.nasa.gov/pub/data/de/de2/'}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'de2'])
+
+if os.environ.get('DE2_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.environ['DE2_DATA_DIR']
+
\ No newline at end of file
diff --git a/pyspedas/de2/load.py b/pyspedas/de2/load.py
new file mode 100644
index 00000000..2504ae15
--- /dev/null
+++ b/pyspedas/de2/load.py
@@ -0,0 +1,78 @@
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+
+def load(trange=['1983-02-16', '1983-02-17'],
+ instrument='mag',
+ datatype='',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the DE2 mission; this function is not meant
+ to be called directly; instead, see the wrappers:
+
+ pyspedas.de2.mag
+ pyspedas.de2.nacs
+ pyspedas.de2.rpa
+ pyspedas.de2.fpi
+ pyspedas.de2.idm
+ pyspedas.de2.wats
+ pyspedas.de2.vefi
+ pyspedas.de2.lang
+
+ """
+
+ if instrument == 'mag':
+ pathformat = 'magnetic_electric_fields_vefi_magb/'+datatype+'_vefimagb_cdaweb/%Y/de2_'+datatype+'_vefimagb_%Y%m%d_v??.cdf'
+ elif instrument == 'nacs':
+ pathformat = 'neutral_gas_nacs/'+datatype+'_'+instrument+'_cdaweb/%Y/de2_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf'
+ elif instrument == 'rpa':
+ pathformat = 'plasma_rpa/'+datatype+'_cdaweb/%Y/de2_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf'
+ elif instrument == 'fpi':
+ pathformat = 'neutral_gas_fpi/de2_neutral8s_fpi/%Y/de2_neutral'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf'
+ elif instrument == 'idm':
+ pathformat = 'plasma_idm/vion250ms_cdaweb/%Y/de2_vion'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf'
+ elif instrument == 'wats':
+ pathformat = 'neutral_gas_wats/wind2s_wats_cdaweb/%Y/de2_wind'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf'
+ elif instrument == 'vefi':
+ pathformat = 'electric_fields_vefi/'+datatype+'_vefi_cdaweb/%Y/de2_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf'
+ elif instrument == 'lang':
+ pathformat = 'plasma_lang/plasma500ms_lang_cdaweb/%Y/de2_plasma'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf'
+
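+ # Example (illustrative): for instrument='mag' and datatype='62ms', the
+ # pattern above resolves for 1983-02-16 to
+ # 'magnetic_electric_fields_vefi_magb/62ms_vefimagb_cdaweb/1983/de2_62ms_vefimagb_19830216_v??.cdf'
+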
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange)
+
+ out_files = []
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly:
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
+
+ if notplot:
+ return tvars
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
+
+
\ No newline at end of file
diff --git a/pyspedas/de2/tests/__init__.py b/pyspedas/de2/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/de2/tests/tests.py b/pyspedas/de2/tests/tests.py
new file mode 100644
index 00000000..69b43d09
--- /dev/null
+++ b/pyspedas/de2/tests/tests.py
@@ -0,0 +1,62 @@
+import os
+import unittest
+from pytplot import data_exists
+import pyspedas
+
+
+class LoadTestCases(unittest.TestCase):
+ def test_load_mag_data(self):
+ out_vars = pyspedas.de2.mag(time_clip=True)
+ self.assertTrue(data_exists('bx'))
+ self.assertTrue(data_exists('by'))
+ self.assertTrue(data_exists('bz'))
+
+ def test_load_nacs_data(self):
+ out_vars = pyspedas.de2.nacs()
+ self.assertTrue(data_exists('O_density'))
+ self.assertTrue(data_exists('N_density'))
+
+ def test_load_rpa_data(self):
+ out_vars = pyspedas.de2.rpa()
+ self.assertTrue(data_exists('ionDensity'))
+ self.assertTrue(data_exists('ionTemperature'))
+
+ def test_load_fpi_data(self):
+ out_vars = pyspedas.de2.fpi()
+ self.assertTrue(data_exists('TnF'))
+
+ # issue with the CDFs here, 2Nov2022
+ # def test_load_idm_data(self):
+ # out_vars = pyspedas.de2.idm()
+ # self.assertTrue(data_exists('ionVelocityY'))
+ # self.assertTrue(data_exists('ionVelocityZ'))
+
+ def test_load_wats_data(self):
+ out_vars = pyspedas.de2.wats()
+ self.assertTrue(data_exists('density'))
+ self.assertTrue(data_exists('Tn'))
+
+ def test_load_vefi_data(self):
+ out_vars = pyspedas.de2.vefi()
+ self.assertTrue(data_exists('spectA'))
+ self.assertTrue(data_exists('spectB'))
+ self.assertTrue(data_exists('spectC'))
+
+ def test_load_lang_data(self):
+ out_vars = pyspedas.de2.lang()
+ self.assertTrue(data_exists('plasmaDensity'))
+ self.assertTrue(data_exists('electronTemp'))
+
+ def test_load_notplot(self):
+ out_vars = pyspedas.de2.mag(notplot=True)
+ self.assertTrue('bz' in out_vars)
+
+ def test_downloadonly(self):
+ files = pyspedas.de2.mag(downloadonly=True, trange=['1983-2-16', '1983-2-17'])
+ self.assertTrue(os.path.exists(files[0]))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+
\ No newline at end of file
diff --git a/pyspedas/dscovr/__init__.py b/pyspedas/dscovr/__init__.py
index 7da48dca..de2b50c4 100644
--- a/pyspedas/dscovr/__init__.py
+++ b/pyspedas/dscovr/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def mag(trange=['2018-10-16', '2018-10-17'],
datatype='h0',
@@ -267,4 +268,8 @@ def all(trange=['2018-10-16', '2018-10-17'], downloadonly=False, suffix='', no_u
orb_vars = orb(trange=trange, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
mag_vars = mag(trange=trange, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
fc_vars = fc(trange=trange, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
- return att_vars + orb_vars + mag_vars + fc_vars
\ No newline at end of file
+ return att_vars + orb_vars + mag_vars + fc_vars
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='DSCOVR', instrument=instrument, label=label)
diff --git a/pyspedas/dscovr/load.py b/pyspedas/dscovr/load.py
index 5b7d333c..10b1397c 100644
--- a/pyspedas/dscovr/load.py
+++ b/pyspedas/dscovr/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/dscovr/tests/tests.py b/pyspedas/dscovr/tests/tests.py
index 8bdc40e8..c7abb5e3 100644
--- a/pyspedas/dscovr/tests/tests.py
+++ b/pyspedas/dscovr/tests/tests.py
@@ -1,15 +1,16 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_load_mag_data(self):
mag_vars = pyspedas.dscovr.mag(time_clip=True)
self.assertTrue(data_exists('dsc_h0_mag_B1RTN'))
self.assertTrue(data_exists('dsc_h0_mag_B1GSE'))
+ mag_vars = pyspedas.dscovr.mag(notplot=True)
+ self.assertTrue('dsc_h0_mag_B1GSE' in mag_vars)
def test_load_fc_data(self):
fc_vars = pyspedas.dscovr.fc()
@@ -37,4 +38,4 @@ def test_load_all(self):
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/equator_s/__init__.py b/pyspedas/equator_s/__init__.py
index 898cf6fa..9c4cb1c4 100644
--- a/pyspedas/equator_s/__init__.py
+++ b/pyspedas/equator_s/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def mam(trange=['1998-04-06', '1998-04-07'],
datatype='pp',
@@ -422,3 +423,7 @@ def sfd(trange=['1998-01-26', '1998-01-27'],
"""
return load(instrument='sfd', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Equator-S', instrument=instrument, label=label)
diff --git a/pyspedas/equator_s/load.py b/pyspedas/equator_s/load.py
index 049e90a0..0babb7ca 100644
--- a/pyspedas/equator_s/load.py
+++ b/pyspedas/equator_s/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/equator_s/tests/tests.py b/pyspedas/equator_s/tests/tests.py
index 9d2c0d76..4d4fa95c 100644
--- a/pyspedas/equator_s/tests/tests.py
+++ b/pyspedas/equator_s/tests/tests.py
@@ -1,14 +1,20 @@
-
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
+ def test_load_notplot(self):
+ mam_vars = pyspedas.equator_s.mam(notplot=True)
+ self.assertTrue('B_xyz_gse%eq_pp_mam' in mam_vars)
+
def test_load_mam_data(self):
- mam_vars = pyspedas.equator_s.mam()
+ mam_vars = pyspedas.equator_s.mam(time_clip=True)
self.assertTrue(data_exists('B_xyz_gse%eq_pp_mam'))
+ def test_load_esa_downloadonly(self):
+ esa = pyspedas.equator_s.esa(downloadonly=True)
+
def test_load_edi_data(self):
edi_vars = pyspedas.equator_s.edi()
self.assertTrue(data_exists('V_ed_xyz_gse%eq_pp_edi'))
@@ -31,5 +37,6 @@ def test_load_sfd_data(self):
sfd_vars = pyspedas.equator_s.sfd()
self.assertTrue(data_exists('F_e>0.26%eq_sp_sfd'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/erg/satellite/erg/att/att.py b/pyspedas/erg/satellite/erg/att/att.py
index df44d9ab..b4184a2b 100644
--- a/pyspedas/erg/satellite/erg/att/att.py
+++ b/pyspedas/erg/satellite/erg/att/att.py
@@ -1,5 +1,5 @@
import pandas as pd
-from pyspedas.utilities.time_double import time_float
+from pytplot import time_float
from pytplot import store_data
from ..load import load
diff --git a/pyspedas/erg/satellite/erg/common/cotrans/cart_trans_matrix_make.py b/pyspedas/erg/satellite/erg/common/cotrans/cart_trans_matrix_make.py
index ff375809..b1fedcf4 100644
--- a/pyspedas/erg/satellite/erg/common/cotrans/cart_trans_matrix_make.py
+++ b/pyspedas/erg/satellite/erg/common/cotrans/cart_trans_matrix_make.py
@@ -1,5 +1,5 @@
import numpy as np
-from pyspedas.analysis.tnormalize import tnormalize
+from pytplot import tnormalize
def cart_trans_matrix_make(x, y, z):
diff --git a/pyspedas/erg/satellite/erg/common/cotrans/dsi2j2000.py b/pyspedas/erg/satellite/erg/common/cotrans/dsi2j2000.py
index 619182c0..7edfe42f 100644
--- a/pyspedas/erg/satellite/erg/common/cotrans/dsi2j2000.py
+++ b/pyspedas/erg/satellite/erg/common/cotrans/dsi2j2000.py
@@ -1,9 +1,9 @@
import numpy as np
from pyspedas import tinterpol
-from pyspedas.analysis.tcrossp import tcrossp
-from pyspedas.analysis.tnormalize import tnormalize
+from pytplot import tcrossp
+from pytplot import tnormalize
from pyspedas.cotrans.cotrans import cotrans
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_string
from pytplot import get_data, get_timespan, options, store_data, tplot_names
from ...orb.orb import orb
diff --git a/pyspedas/erg/satellite/erg/common/cotrans/erg_interpolate_att.py b/pyspedas/erg/satellite/erg/common/cotrans/erg_interpolate_att.py
index 522c99b0..9b249a6d 100644
--- a/pyspedas/erg/satellite/erg/common/cotrans/erg_interpolate_att.py
+++ b/pyspedas/erg/satellite/erg/common/cotrans/erg_interpolate_att.py
@@ -1,7 +1,7 @@
import numpy as np
from pyspedas import tnames
-from pyspedas.analysis.tcrossp import tcrossp
-from pyspedas.utilities.time_string import time_string
+from pytplot import tcrossp
+from pytplot import time_string
from pytplot import get_data, get_timespan
from pytplot.tplot_math.degap import degap
from scipy import interpolate
diff --git a/pyspedas/erg/satellite/erg/lepe/lepe.py b/pyspedas/erg/satellite/erg/lepe/lepe.py
index 778b5a2c..6bfa38e3 100644
--- a/pyspedas/erg/satellite/erg/lepe/lepe.py
+++ b/pyspedas/erg/satellite/erg/lepe/lepe.py
@@ -3,7 +3,8 @@
import numpy as np
from pytplot import clip, get_data, options, store_data, ylim, zlim
-from pyspedas.utilities.time_double import time_double
+from pytplot import time_double
+
from ..load import load
diff --git a/pyspedas/erg/satellite/erg/load.py b/pyspedas/erg/satellite/erg/load.py
index 7f5c884f..1c95b7df 100644
--- a/pyspedas/erg/satellite/erg/load.py
+++ b/pyspedas/erg/satellite/erg/load.py
@@ -1,6 +1,6 @@
import cdflib
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
from pytplot import cdf_to_tplot
diff --git a/pyspedas/erg/satellite/erg/particle/erg_hep_get_dist.py b/pyspedas/erg/satellite/erg/particle/erg_hep_get_dist.py
index cfc87bf1..89ca0feb 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_hep_get_dist.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_hep_get_dist.py
@@ -6,8 +6,8 @@
from copy import deepcopy
from scipy.spatial import KDTree
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pytplot import get_data
from scipy import interpolate
diff --git a/pyspedas/erg/satellite/erg/particle/erg_hep_part_products.py b/pyspedas/erg/satellite/erg/particle/erg_hep_part_products.py
index 3b5bf6a5..aff1a659 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_hep_part_products.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_hep_part_products.py
@@ -3,9 +3,8 @@
from copy import deepcopy
from pyspedas import tnames, tinterpol, tcopy
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
-
+from pytplot import time_double
+from pytplot import time_string
from pyspedas.particles.moments.spd_pgs_moments import spd_pgs_moments
from pyspedas.particles.spd_part_products.spd_pgs_regrid import spd_pgs_regrid
from pytplot import get_timespan, get_data, store_data
diff --git a/pyspedas/erg/satellite/erg/particle/erg_lep_part_products.py b/pyspedas/erg/satellite/erg/particle/erg_lep_part_products.py
index 641925bf..d44d62bd 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_lep_part_products.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_lep_part_products.py
@@ -3,8 +3,8 @@
from copy import deepcopy
from pyspedas import tnames, tinterpol, tcopy
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pyspedas.particles.moments.spd_pgs_moments import spd_pgs_moments
from pyspedas.particles.spd_part_products.spd_pgs_regrid import spd_pgs_regrid
diff --git a/pyspedas/erg/satellite/erg/particle/erg_lepe_get_dist.py b/pyspedas/erg/satellite/erg/particle/erg_lepe_get_dist.py
index 0a2a1faa..7edf5aa2 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_lepe_get_dist.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_lepe_get_dist.py
@@ -6,8 +6,8 @@
from copy import deepcopy
from scipy.spatial import KDTree
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pytplot import get_data
from scipy import interpolate
diff --git a/pyspedas/erg/satellite/erg/particle/erg_lepi_get_dist.py b/pyspedas/erg/satellite/erg/particle/erg_lepi_get_dist.py
index cd64081c..ecf7043e 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_lepi_get_dist.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_lepi_get_dist.py
@@ -3,8 +3,8 @@
import numpy as np
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pytplot import get_data
from scipy import interpolate
diff --git a/pyspedas/erg/satellite/erg/particle/erg_mep_part_products.py b/pyspedas/erg/satellite/erg/particle/erg_mep_part_products.py
index 1bdc68ac..cc416fba 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_mep_part_products.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_mep_part_products.py
@@ -3,8 +3,8 @@
from copy import deepcopy
from pyspedas import tnames, tinterpol
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pyspedas.particles.moments.spd_pgs_moments import spd_pgs_moments
from pyspedas.particles.spd_part_products.spd_pgs_regrid import spd_pgs_regrid
diff --git a/pyspedas/erg/satellite/erg/particle/erg_mepe_get_dist.py b/pyspedas/erg/satellite/erg/particle/erg_mepe_get_dist.py
index a19e3557..be329f9c 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_mepe_get_dist.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_mepe_get_dist.py
@@ -3,8 +3,8 @@
import numpy as np
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pytplot import get_data
from scipy import interpolate
diff --git a/pyspedas/erg/satellite/erg/particle/erg_mepi_get_dist.py b/pyspedas/erg/satellite/erg/particle/erg_mepi_get_dist.py
index 501a3edc..91065726 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_mepi_get_dist.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_mepi_get_dist.py
@@ -3,8 +3,8 @@
import numpy as np
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pytplot import get_data
from scipy import interpolate
diff --git a/pyspedas/erg/satellite/erg/particle/erg_pgs_make_fac.py b/pyspedas/erg/satellite/erg/particle/erg_pgs_make_fac.py
index 268a8822..75ece49b 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_pgs_make_fac.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_pgs_make_fac.py
@@ -4,8 +4,8 @@
from pytplot import get_data, store_data
from pyspedas.cotrans.cotrans import cotrans
-from pyspedas.analysis.tnormalize import tnormalize
-from pyspedas.analysis.tcrossp import tcrossp
+from pytplot import tnormalize
+from pytplot import tcrossp
from pyspedas.analysis.tinterpol import tinterpol
from ..common.cotrans.erg_cotrans import erg_cotrans
diff --git a/pyspedas/erg/satellite/erg/particle/erg_xep_get_dist.py b/pyspedas/erg/satellite/erg/particle/erg_xep_get_dist.py
index 491424ba..9b110689 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_xep_get_dist.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_xep_get_dist.py
@@ -4,8 +4,8 @@
import numpy as np
from copy import deepcopy
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pytplot import get_data
from scipy import interpolate
diff --git a/pyspedas/erg/satellite/erg/particle/erg_xep_part_products.py b/pyspedas/erg/satellite/erg/particle/erg_xep_part_products.py
index aec1ce2e..1d39281c 100644
--- a/pyspedas/erg/satellite/erg/particle/erg_xep_part_products.py
+++ b/pyspedas/erg/satellite/erg/particle/erg_xep_part_products.py
@@ -3,8 +3,8 @@
from copy import deepcopy
from pyspedas import tnames, tinterpol
-from pyspedas.utilities.time_double import time_double
-from pyspedas.utilities.time_string import time_string
+from pytplot import time_double
+from pytplot import time_string
from pyspedas.particles.moments.spd_pgs_moments import spd_pgs_moments
from pyspedas.particles.spd_part_products.spd_pgs_regrid import spd_pgs_regrid
diff --git a/pyspedas/erg/satellite/erg/pwe/pwe_efd.py b/pyspedas/erg/satellite/erg/pwe/pwe_efd.py
index 11b5df9e..de4fb676 100644
--- a/pyspedas/erg/satellite/erg/pwe/pwe_efd.py
+++ b/pyspedas/erg/satellite/erg/pwe/pwe_efd.py
@@ -1,7 +1,7 @@
import cdflib
import numpy as np
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_float
+from pytplot import time_float
from pytplot import get_data, options, store_data, ylim, zlim
from ..load import load
diff --git a/pyspedas/erg/satellite/erg/pwe/pwe_wfc.py b/pyspedas/erg/satellite/erg/pwe/pwe_wfc.py
index de99b8ec..720abb8e 100644
--- a/pyspedas/erg/satellite/erg/pwe/pwe_wfc.py
+++ b/pyspedas/erg/satellite/erg/pwe/pwe_wfc.py
@@ -1,7 +1,7 @@
import cdflib
import numpy as np
from pyspedas import tnames
-from pyspedas.utilities.time_double import time_float
+from pytplot import time_float
from pytplot import clip, get_data, options, store_data, ylim, zlim
from ..load import load
diff --git a/pyspedas/erg/tests/tests.py b/pyspedas/erg/tests/tests.py
index 80c70ad9..4cb3d5f6 100644
--- a/pyspedas/erg/tests/tests.py
+++ b/pyspedas/erg/tests/tests.py
@@ -1,7 +1,7 @@
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
import pyspedas
diff --git a/pyspedas/fast/__init__.py b/pyspedas/fast/__init__.py
index 281e908a..b2c1a4c8 100644
--- a/pyspedas/fast/__init__.py
+++ b/pyspedas/fast/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def dcb(trange=['2001-09-05', '2001-09-06'],
datatype='',
@@ -244,3 +245,7 @@ def teams(trange=['1998-09-05', '1998-09-06'],
"""
return load(instrument='teams', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
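+    """
+    List the datasets available for the FAST mission.
+
+    Thin wrapper around pyspedas.utilities.datasets.find_datasets; the
+    instrument keyword optionally restricts the results to one instrument.
+    """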
+ return find_datasets(mission='FAST', instrument=instrument, label=label)
diff --git a/pyspedas/fast/load.py b/pyspedas/fast/load.py
index fae18738..3c0adc64 100644
--- a/pyspedas/fast/load.py
+++ b/pyspedas/fast/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/fast/tests/tests.py b/pyspedas/fast/tests/tests.py
index 074c1d33..67c4aaf6 100644
--- a/pyspedas/fast/tests/tests.py
+++ b/pyspedas/fast/tests/tests.py
@@ -1,10 +1,9 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_downloadonly(self):
files = pyspedas.fast.acb(trange=['1999-09-22', '1999-09-23'], time_clip=True, level='k0', downloadonly=True)
@@ -15,16 +14,16 @@ def test_load_dcb_data(self):
self.assertTrue(data_exists('BX'))
self.assertTrue(data_exists('BY'))
self.assertTrue(data_exists('BZ'))
+ dcb_vars = pyspedas.fast.dcb(trange=['1998-09-22', '1998-09-23'], level='l2')
def test_load_acb_data(self):
dcb_vars = pyspedas.fast.acb(trange=['1999-09-22', '1999-09-23'], time_clip=True, level='k0')
self.assertTrue(data_exists('HF_PWR'))
self.assertTrue(data_exists('HF_E_SPEC'))
- # as of 8 Feb 2020, these data fail to load in IDL and Python
- # def test_load_esa_data(self):
- # esa_vars = pyspedas.fast.esa()
- # self.assertTrue(data_exists(''))
+ def test_load_esa_data(self):
+ esa_vars = pyspedas.fast.esa(notplot=True, trange=['1998-09-05/02:00', '1998-09-05/02:30'])
+ self.assertTrue('eflux' in esa_vars)
def test_load_teams_data(self):
teams_vars = pyspedas.fast.teams()
@@ -32,5 +31,6 @@ def test_load_teams_data(self):
self.assertTrue(data_exists('O+'))
self.assertTrue(data_exists('He+'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/geopack/get_tsy_params.py b/pyspedas/geopack/get_tsy_params.py
index ffa623cc..baf406d8 100644
--- a/pyspedas/geopack/get_tsy_params.py
+++ b/pyspedas/geopack/get_tsy_params.py
@@ -1,10 +1,19 @@
-
+import logging
import numpy as np
from pyspedas import tinterpol, tdeflag
from pyspedas.geopack.get_w_params import get_w
from pytplot import get_data, store_data
-def get_tsy_params(dst_tvar, imf_tvar, Np_tvar, Vp_tvar, model, pressure_tvar=None, newname=None, speed=False, g_variables=None):
+
+def get_tsy_params(dst_tvar,
+ imf_tvar,
+ Np_tvar,
+ Vp_tvar,
+ model,
+ pressure_tvar=None,
+ newname=None,
+ speed=False,
+ g_variables=None):
"""
This procedure will interpolate inputs, generate
Tsyganenko model parameters and store them in a tplot
@@ -63,7 +72,7 @@ def get_tsy_params(dst_tvar, imf_tvar, Np_tvar, Vp_tvar, model, pressure_tvar=No
model = model.lower()
if model not in ['t89', 't96', 't01', 'ts04']:
- print('Unknown model: ' + model)
+ logging.error('Unknown model: ' + model)
return
tdeflag(Np_tvar, method='remove_nan', overwrite=True)
@@ -100,14 +109,14 @@ def get_tsy_params(dst_tvar, imf_tvar, Np_tvar, Vp_tvar, model, pressure_tvar=No
np.zeros(len(dst_data.y))))
elif model == 't01':
if g_variables is None:
- print('G variables required for T01 model; create a tplot variable containing the G variables, and provide the name of that keyword to the g_variables keyword.')
+            logging.error('G variables required for T01 model; create a tplot variable containing the G variables, and pass the name of that variable via the g_variables keyword.')
return
else:
if isinstance(g_variables, str):
g_data = get_data(g_variables)
if g_data is None:
- print('Problem reading G variable: ' + g_variables)
+ logging.error('Problem reading G variable: ' + g_variables)
return
g1 = g_data.y[:, 0]
@@ -140,7 +149,7 @@ def get_tsy_params(dst_tvar, imf_tvar, Np_tvar, Vp_tvar, model, pressure_tvar=No
w_data = get_data(params+'_interp')
if w_data is None:
- print('Problem loading W variables for TS04 model.')
+ logging.error('Problem loading W variables for TS04 model.')
return
out = np.array((P_data.y,
@@ -153,9 +162,6 @@ def get_tsy_params(dst_tvar, imf_tvar, Np_tvar, Vp_tvar, model, pressure_tvar=No
w_data.y[:, 3],
w_data.y[:, 4],
w_data.y[:, 5]))
- elif model == 't01':
- print('not implemented yet')
- return
if newname is None:
newname = model + '_par'
@@ -164,5 +170,3 @@ def get_tsy_params(dst_tvar, imf_tvar, Np_tvar, Vp_tvar, model, pressure_tvar=No
if saved:
return newname
-
-
diff --git a/pyspedas/geopack/get_w_params.py b/pyspedas/geopack/get_w_params.py
index 1bbb4476..13dd3a88 100644
--- a/pyspedas/geopack/get_w_params.py
+++ b/pyspedas/geopack/get_w_params.py
@@ -1,4 +1,5 @@
-
+import logging
+import warnings
import numpy as np
import pandas as pd
import zipfile
@@ -8,6 +9,7 @@
from pyspedas.utilities.download import download
from pytplot import store_data
+
def get_w(trange=None, create_tvar=False, newname=None):
"""
     This routine downloads the 6 Tsyganenko (TS05) model
@@ -17,7 +19,7 @@ def get_w(trange=None, create_tvar=False, newname=None):
"""
if trange is None:
- print('trange keyword must be specified.')
+ logging.error('trange keyword must be specified.')
return
years = dailynames(trange=trange, file_format='%Y')
@@ -35,9 +37,16 @@ def get_w(trange=None, create_tvar=False, newname=None):
w6_out = np.empty(0)
for year in years:
- file = download(remote_path='http://geo.phys.spbu.ru/~tsyganenko/TS05_data_and_stuff/',
- remote_file=year+'_OMNI_5m_with_TS05_variables.???',
- local_path=tmpdir)
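+        # verify=False skips SSL certificate validation for this server;
+        # silence the warnings the unverified request would otherwise emit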
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ file = download(remote_path='https://geo.phys.spbu.ru/~tsyganenko/TS05_data_and_stuff/',
+ remote_file=year+'_OMNI_5m_with_TS05_variables.???',
+ local_path=tmpdir,
+ verify=False)
+
+ if len(file) == 0:
+ logging.error('No files found for ' + year)
+ continue
if file[0][-3:] == 'zip':
with zipfile.ZipFile(file[0], 'r') as zip_ref:
@@ -72,7 +81,7 @@ def get_w(trange=None, create_tvar=False, newname=None):
in_range = np.argwhere((ut_out >= time_double(trange[0])) & (ut_out < time_double(trange[1]))).squeeze()
if len(in_range) == 0:
- print('No data found in the trange.')
+ logging.error('No data found in the trange.')
return
if create_tvar:
@@ -86,4 +95,4 @@ def get_w(trange=None, create_tvar=False, newname=None):
'w3': w3_out[in_range],
'w4': w4_out[in_range],
'w5': w5_out[in_range],
- 'w6': w6_out[in_range]}
\ No newline at end of file
+ 'w6': w6_out[in_range]}
diff --git a/pyspedas/geopack/t01.py b/pyspedas/geopack/t01.py
index 0ba2ab5e..15ca0919 100644
--- a/pyspedas/geopack/t01.py
+++ b/pyspedas/geopack/t01.py
@@ -1,8 +1,9 @@
-
+import logging
import numpy as np
from pytplot import get_data, store_data
from geopack import geopack, t01
+
def tt01(pos_var_gsm, parmod=None, suffix=''):
"""
tplot wrapper for the functional interface to Sheng Tian's implementation of the Tsyganenko 2001 and IGRF model:
@@ -35,7 +36,7 @@ def tt01(pos_var_gsm, parmod=None, suffix=''):
pos_data = get_data(pos_var_gsm)
if pos_data is None:
- print('Variable not found: ' + pos_var_gsm)
+ logging.error('Variable not found: ' + pos_var_gsm)
return
b0gsm = np.zeros((len(pos_data.times), 3))
@@ -50,7 +51,7 @@ def tt01(pos_var_gsm, parmod=None, suffix=''):
if par is not None:
par = par.y
else:
- print('parmod keyword required.')
+ logging.error('parmod keyword required.')
return
for idx, time in enumerate(pos_data.times):
@@ -67,4 +68,4 @@ def tt01(pos_var_gsm, parmod=None, suffix=''):
saved = store_data(pos_var_gsm + '_bt01' + suffix, data={'x': pos_data.times, 'y': bgsm})
if saved:
- return pos_var_gsm + '_bt01' + suffix
\ No newline at end of file
+ return pos_var_gsm + '_bt01' + suffix
diff --git a/pyspedas/geopack/t89.py b/pyspedas/geopack/t89.py
index 516f6588..cc5a072c 100644
--- a/pyspedas/geopack/t89.py
+++ b/pyspedas/geopack/t89.py
@@ -1,8 +1,9 @@
-
+import logging
import numpy as np
from pytplot import get_data, store_data
from geopack import geopack, t89
+
def tt89(pos_var_gsm, iopt=3, suffix='', igrf_only=False):
"""
tplot wrapper for the functional interface to Sheng Tian's implementation
@@ -33,7 +34,7 @@ def tt89(pos_var_gsm, iopt=3, suffix='', igrf_only=False):
pos_data = get_data(pos_var_gsm)
if pos_data is None:
- print('Variable not found: ' + pos_var_gsm)
+ logging.error('Variable not found: ' + pos_var_gsm)
return
b0gsm = np.zeros((len(pos_data.times), 3))
@@ -59,4 +60,4 @@ def tt89(pos_var_gsm, iopt=3, suffix='', igrf_only=False):
saved = store_data(pos_var_gsm + '_bt89' + suffix, data={'x': pos_data.times, 'y': bgsm})
if saved:
- return pos_var_gsm + '_bt89' + suffix
\ No newline at end of file
+ return pos_var_gsm + '_bt89' + suffix
diff --git a/pyspedas/geopack/t96.py b/pyspedas/geopack/t96.py
index 23ca3605..517fda72 100644
--- a/pyspedas/geopack/t96.py
+++ b/pyspedas/geopack/t96.py
@@ -1,8 +1,9 @@
-
+import logging
import numpy as np
from pytplot import get_data, store_data
from geopack import geopack, t96
+
def tt96(pos_var_gsm, parmod=None, suffix=''):
"""
tplot wrapper for the functional interface to Sheng Tian's implementation of the Tsyganenko 96 and IGRF model:
@@ -33,7 +34,7 @@ def tt96(pos_var_gsm, parmod=None, suffix=''):
pos_data = get_data(pos_var_gsm)
if pos_data is None:
- print('Variable not found: ' + pos_var_gsm)
+ logging.error('Variable not found: ' + pos_var_gsm)
return
b0gsm = np.zeros((len(pos_data.times), 3))
@@ -48,7 +49,7 @@ def tt96(pos_var_gsm, parmod=None, suffix=''):
if par is not None:
par = par.y
else:
- print('parmod keyword required.')
+ logging.error('parmod keyword required.')
return
for idx, time in enumerate(pos_data.times):
@@ -65,4 +66,4 @@ def tt96(pos_var_gsm, parmod=None, suffix=''):
saved = store_data(pos_var_gsm + '_bt96' + suffix, data={'x': pos_data.times, 'y': bgsm})
if saved:
- return pos_var_gsm + '_bt96' + suffix
\ No newline at end of file
+ return pos_var_gsm + '_bt96' + suffix
diff --git a/pyspedas/geopack/tests/tests.py b/pyspedas/geopack/tests/tests.py
index 9fbeb7fa..06509443 100644
--- a/pyspedas/geopack/tests/tests.py
+++ b/pyspedas/geopack/tests/tests.py
@@ -1,7 +1,8 @@
-
-import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
+
+import numpy as np
+
+from pytplot import data_exists
import pyspedas
from pyspedas import time_double
from pyspedas.geopack import tt89
@@ -9,25 +10,35 @@
from pyspedas.geopack import tt01
from pyspedas.geopack import tts04
from pyspedas.geopack.get_tsy_params import get_tsy_params
+from pyspedas.geopack.get_w_params import get_w
from pyspedas import tinterpol
-from pytplot import join_vec
+from pytplot import join_vec, store_data, get_data
trange = ['2015-10-16', '2015-10-17']
-def get_params(model):
+
+def get_params(model, g_variables=None):
support_trange = [time_double(trange[0])-60*60*24,
time_double(trange[1])+60*60*24]
pyspedas.kyoto.dst(trange=support_trange)
pyspedas.omni.data(trange=trange)
join_vec(['BX_GSE', 'BY_GSM', 'BZ_GSM'])
- return get_tsy_params('kyoto_dst',
- 'BX_GSE-BY_GSM-BZ_GSM_joined',
- 'proton_density',
- 'flow_speed',
- model,
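+    # T01 requires G parameters: default to scalar G1/G2 values when none are
+    # supplied; otherwise accept only a tplot variable name (str) or an
+    # ndarray, and fall back to None for anything else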
+ if model == 't01' and g_variables is None:
+ g_variables = [6.0, 10.0]
+ else:
+ if g_variables is not None:
+ if not isinstance(g_variables, str) and not isinstance(g_variables, np.ndarray):
+ g_variables = None
+ return get_tsy_params('kyoto_dst',
+ 'BX_GSE-BY_GSM-BZ_GSM_joined',
+ 'proton_density',
+ 'flow_speed',
+ model,
pressure_tvar='Pressure',
+ g_variables=g_variables,
speed=True)
+
class LoadTestCases(unittest.TestCase):
def test_igrf(self):
mec_vars = pyspedas.mms.mec(trange=trange)
@@ -52,6 +63,17 @@ def test_tt01(self):
tinterpol('mms1_mec_r_gsm', 'proton_density')
tt01('mms1_mec_r_gsm-itrp', parmod=params)
self.assertTrue(data_exists('mms1_mec_r_gsm-itrp_bt01'))
+ mec = get_data('mms1_mec_r_gsm-itrp')
+ gvars = np.zeros((len(mec.times), 2))
+ gvars[:, 0] = np.repeat(6.0, len(mec.times))
+ gvars[:, 1] = np.repeat(10.0, len(mec.times))
+ store_data('g_variables', data={'x': mec.times, 'y': gvars})
+ params = get_params('t01', g_variables='g_variables')
+ tt01('mms1_mec_r_gsm-itrp', parmod=params)
+ self.assertTrue(data_exists('mms1_mec_r_gsm-itrp_bt01'))
+ params = get_params('t01', g_variables=gvars)
+ tt01('mms1_mec_r_gsm-itrp', parmod=params)
+ self.assertTrue(data_exists('mms1_mec_r_gsm-itrp_bt01'))
def test_tts04(self):
mec_vars = pyspedas.mms.mec(trange=trange)
@@ -60,5 +82,27 @@ def test_tts04(self):
tts04('mms1_mec_r_gsm-itrp', parmod=params)
self.assertTrue(data_exists('mms1_mec_r_gsm-itrp_bts04'))
+ def test_get_w(self):
+ w_vals = get_w(trange=['2015-10-16', '2015-10-17'])
+
+ def test_errors(self):
+ # exercise some of the error code
+ mec_vars = pyspedas.mms.mec(trange=trange)
+ params = get_params('ts04')
+ tinterpol('mms1_mec_r_gsm', 'proton_density')
+ tts04('var_doesnt_exist')
+ tts04('mms1_mec_r_gsm-itrp', parmod=None)
+ tt01('var_doesnt_exist')
+ tt01('mms1_mec_r_gsm-itrp', parmod=None)
+ tt96('var_doesnt_exist')
+ tt96('mms1_mec_r_gsm-itrp', parmod=None)
+ tt89('var_doesnt_exist')
+ invalidmodel = get_params('89')
+ invalidg = get_params('t01', g_variables=1)
+ invalidg = get_params('t01', g_variables='g_vars')
+ notrange = get_w() # no trange
+ invalidtrange = get_w(trange=['2050-01-01', '2050-01-02'])
+
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/geopack/ts04.py b/pyspedas/geopack/ts04.py
index 87f34e0c..daa5f7e6 100644
--- a/pyspedas/geopack/ts04.py
+++ b/pyspedas/geopack/ts04.py
@@ -1,8 +1,9 @@
-
+import logging
import numpy as np
from pytplot import get_data, store_data
from geopack import geopack, t04
+
def tts04(pos_var_gsm, parmod=None, suffix=''):
"""
tplot wrapper for the functional interface to Sheng Tian's implementation of the
@@ -36,7 +37,7 @@ def tts04(pos_var_gsm, parmod=None, suffix=''):
pos_data = get_data(pos_var_gsm)
if pos_data is None:
- print('Variable not found: ' + pos_var_gsm)
+ logging.error('Variable not found: ' + pos_var_gsm)
return
b0gsm = np.zeros((len(pos_data.times), 3))
@@ -51,7 +52,7 @@ def tts04(pos_var_gsm, parmod=None, suffix=''):
if par is not None:
par = par.y
else:
- print('parmod keyword required.')
+ logging.error('parmod keyword required.')
return
for idx, time in enumerate(pos_data.times):
@@ -72,4 +73,4 @@ def tts04(pos_var_gsm, parmod=None, suffix=''):
saved = store_data(pos_var_gsm + '_bts04' + suffix, data={'x': pos_data.times, 'y': bgsm})
if saved:
- return pos_var_gsm + '_bts04' + suffix
\ No newline at end of file
+ return pos_var_gsm + '_bts04' + suffix
diff --git a/pyspedas/geotail/__init__.py b/pyspedas/geotail/__init__.py
index aa496f8e..e5b4edae 100644
--- a/pyspedas/geotail/__init__.py
+++ b/pyspedas/geotail/__init__.py
@@ -1,6 +1,7 @@
-
from .load import load
from pytplot import options
+from pyspedas.utilities.datasets import find_datasets
+
def mgf(trange=['2018-11-5', '2018-11-6'],
datatype='k0',
@@ -373,3 +374,7 @@ def pwi(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='pwi', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
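+    """
+    List the datasets available for the Geotail mission.
+
+    Thin wrapper around pyspedas.utilities.datasets.find_datasets; the
+    instrument keyword optionally restricts the results to one instrument.
+    """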
+ return find_datasets(mission='Geotail', instrument=instrument, label=label)
diff --git a/pyspedas/geotail/load.py b/pyspedas/geotail/load.py
index f27b097b..18926c11 100644
--- a/pyspedas/geotail/load.py
+++ b/pyspedas/geotail/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/geotail/tests/tests.py b/pyspedas/geotail/tests/tests.py
index 5d8d9300..c75683aa 100644
--- a/pyspedas/geotail/tests/tests.py
+++ b/pyspedas/geotail/tests/tests.py
@@ -1,7 +1,7 @@
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
import pyspedas
@@ -32,6 +32,8 @@ def test_load_cpi_data(self):
def test_load_epic_data(self):
epic_vars = pyspedas.geotail.epic()
self.assertTrue(data_exists('IDiffI_I'))
+ epic_vars = pyspedas.geotail.epic(notplot=True)
+ self.assertTrue('IDiffI_I' in epic_vars)
def test_load_pwi_data(self):
pwi_vars = pyspedas.geotail.pwi()
diff --git a/pyspedas/goes/__init__.py b/pyspedas/goes/__init__.py
index 25b6b526..08850a56 100644
--- a/pyspedas/goes/__init__.py
+++ b/pyspedas/goes/__init__.py
@@ -1,21 +1,81 @@
+from .load import load, loadr
+from .load_orbit import load_orbit
+
+
+def orbit(trange=['2013-11-5', '2013-11-6'],
+ probe='15',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ notplot=False,
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ time_clip=True):
+ """
+
+ This function loads GOES orbit data (probes 8-18)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=15
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ If set, load the data into dictionaries containing the numpy objects instead
+ of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created. Or list of filenames downloaded.
+
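+    Example
+    -------
+    A minimal usage sketch (it assumes the default NOAA remote server is reachable):
+
+        import pyspedas
+        orbit_vars = pyspedas.goes.orbit(trange=['2013-11-5', '2013-11-6'], probe='15')
+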
+ """
+ return load_orbit(trange=trange, probe=probe, varnames=varnames, varformat=varformat,
+ prefix=prefix, suffix=suffix, downloadonly=downloadonly,
+ no_update=no_update, time_clip=time_clip, notplot=notplot, get_support_data=get_support_data)
-from .load import load
-def fgm(trange=['2013-11-5', '2013-11-6'],
+def fgm(trange=['2013-11-5', '2013-11-6'],
probe='15',
- datatype='1min',
- suffix='',
+ datatype='1min',
+ prefix='',
+ suffix='',
downloadonly=False,
no_update=False,
time_clip=False):
"""
- This function loads data from the GOES Magnetometer
-
+ This function loads data from the GOES Magnetometer (probes 8-15)
+
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str/int or list of strs/ints
@@ -24,12 +84,17 @@ def fgm(trange=['2013-11-5', '2013-11-6'],
datatype: str
Data type; Valid options:
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
suffix: str
- The tplot variable names will be given this suffix. By default,
- no suffix is added.
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
downloadonly: bool
- Set this flag to download the CDF files, but not load them into
+ Set this flag to download the CDF files, but not load them into
tplot variables
no_update: bool
@@ -40,26 +105,29 @@ def fgm(trange=['2013-11-5', '2013-11-6'],
Returns
----------
- List of tplot variables created.
+ List of tplot variables created. Or list of filenames downloaded.
"""
- return load(instrument='fgm', trange=trange, probe=probe, datatype=datatype, suffix=suffix, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+ return load(instrument='fgm', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix, downloadonly=downloadonly,
+ time_clip=time_clip, no_update=no_update)
+
-def eps(trange=['2013-11-5', '2013-11-6'],
+def eps(trange=['2013-11-5', '2013-11-6'],
probe='12',
- datatype='1min',
- suffix='',
+ datatype='1min',
+ prefix='',
+ suffix='',
downloadonly=False,
no_update=False,
time_clip=False):
"""
- This function loads data from the GOES energetic particle sensor
-
+ This function loads data from the GOES energetic particle sensor (probes 8-15)
+
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str/int or list of strs/ints
@@ -68,12 +136,17 @@ def eps(trange=['2013-11-5', '2013-11-6'],
datatype: str
Data type; Valid options:
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
suffix: str
- The tplot variable names will be given this suffix. By default,
- no suffix is added.
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
downloadonly: bool
- Set this flag to download the CDF files, but not load them into
+ Set this flag to download the CDF files, but not load them into
tplot variables
no_update: bool
@@ -84,26 +157,29 @@ def eps(trange=['2013-11-5', '2013-11-6'],
Returns
----------
- List of tplot variables created.
+ List of tplot variables created. Or list of filenames downloaded.
"""
- return load(instrument='eps', trange=trange, probe=probe, datatype=datatype, suffix=suffix, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
-
-def epead(trange=['2013-11-5', '2013-11-6'],
- probe='15',
- datatype='1min',
- suffix='',
- downloadonly=False,
- no_update=False,
- time_clip=False):
+ return load(instrument='eps', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def epead(trange=['2013-11-5', '2013-11-6'],
+ probe='15',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
"""
- This function loads data from the GOES Electron, Proton, Alpha Detector
-
+ This function loads data from the GOES Electron, Proton, Alpha Detector (probes 8-15)
+
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str/int or list of strs/ints
@@ -112,12 +188,17 @@ def epead(trange=['2013-11-5', '2013-11-6'],
datatype: str
Data type; Valid options:
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
suffix: str
- The tplot variable names will be given this suffix. By default,
- no suffix is added.
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
downloadonly: bool
- Set this flag to download the CDF files, but not load them into
+ Set this flag to download the CDF files, but not load them into
tplot variables
no_update: bool
@@ -128,40 +209,48 @@ def epead(trange=['2013-11-5', '2013-11-6'],
Returns
----------
- List of tplot variables created.
+ List of tplot variables created. Or list of filenames downloaded.
"""
- return load(instrument='epead', trange=trange, probe=probe, datatype=datatype, suffix=suffix, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
-
-def maged(trange=['2013-11-5', '2013-11-6'],
- probe='15',
- datatype='1min',
- suffix='',
- downloadonly=False,
- no_update=False,
- time_clip=False):
+ return load(instrument='epead', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def maged(trange=['2013-11-5', '2013-11-6'],
+ probe='15',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
"""
- This function loads data from the GOES Magnetospheric Electron Detector
-
+ This function loads data from the GOES Magnetospheric Electron Detector (probes 8-15)
+
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str/int or list of strs/ints
GOES spacecraft #, e.g., probe=15
datatype: str
- Data type; Valid options:
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
suffix: str
- The tplot variable names will be given this suffix. By default,
- no suffix is added.
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
downloadonly: bool
- Set this flag to download the CDF files, but not load them into
+ Set this flag to download the CDF files, but not load them into
tplot variables
no_update: bool
@@ -172,40 +261,100 @@ def maged(trange=['2013-11-5', '2013-11-6'],
Returns
----------
- List of tplot variables created.
+ List of tplot variables created. Or list of filenames downloaded.
"""
- return load(instrument='maged', trange=trange, probe=probe, datatype=datatype, suffix=suffix, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+ return load(instrument='maged', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def magpd(trange=['2013-11-5', '2013-11-6'],
+ probe='15',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the GOES Magnetospheric Proton Detector (probes 8-15)
-def magpd(trange=['2013-11-5', '2013-11-6'],
- probe='15',
- datatype='1min',
- suffix='',
- downloadonly=False,
- no_update=False,
- time_clip=False):
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=15
+
+ datatype: str
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created. Or list of filenames downloaded.
+
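+    Example
+    -------
+    A minimal usage sketch (it assumes GOES-15 MAGPD averages exist for this day):
+
+        import pyspedas
+        magpd_vars = pyspedas.goes.magpd(trange=['2013-11-5', '2013-11-6'], probe='15')
+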
+ """
+ return load(instrument='magpd', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def hepad(trange=['2013-11-5', '2013-11-6'],
+ probe='15',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
"""
- This function loads data from the GOES Magnetospheric Proton Detector
-
+ This function loads data from the GOES High energy Proton and Alpha Detector (probes 8-15)
+
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str/int or list of strs/ints
GOES spacecraft #, e.g., probe=15
datatype: str
- Data type; Valid options:
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
suffix: str
- The tplot variable names will be given this suffix. By default,
- no suffix is added.
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
downloadonly: bool
- Set this flag to download the CDF files, but not load them into
+ Set this flag to download the CDF files, but not load them into
tplot variables
no_update: bool
@@ -216,40 +365,48 @@ def magpd(trange=['2013-11-5', '2013-11-6'],
Returns
----------
- List of tplot variables created.
+ List of tplot variables created. Or list of filenames downloaded.
"""
- return load(instrument='magpd', trange=trange, probe=probe, datatype=datatype, suffix=suffix, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+ return load(instrument='hepad', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
-def hepad(trange=['2013-11-5', '2013-11-6'],
+
+def xrs(trange=['2013-11-5', '2013-11-6'],
probe='15',
- datatype='1min',
- suffix='',
+ datatype='1min',
+ prefix='',
+ suffix='',
downloadonly=False,
no_update=False,
time_clip=False):
"""
- This function loads data from the GOES High energy Proton and Alpha Detector
-
+ This function loads data from the GOES X-ray Sensor (probes 8-18)
+
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str/int or list of strs/ints
GOES spacecraft #, e.g., probe=15
datatype: str
- Data type; Valid options:
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
suffix: str
- The tplot variable names will be given this suffix. By default,
- no suffix is added.
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
downloadonly: bool
- Set this flag to download the CDF files, but not load them into
+ Set this flag to download the CDF files, but not load them into
tplot variables
no_update: bool
@@ -260,40 +417,204 @@ def hepad(trange=['2013-11-5', '2013-11-6'],
Returns
----------
- List of tplot variables created.
+ List of tplot variables created. Or list of filenames downloaded.
"""
- return load(instrument='hepad', trange=trange, probe=probe, datatype=datatype, suffix=suffix, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+ return load(instrument='xrs', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def euvs(trange=['2023-01-30', '2023-01-31'],
+ probe='16',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the GOES Extreme Ultraviolet Sensor (EUVS), (probes 16-18)
-def xrs(trange=['2013-11-5', '2013-11-6'],
- probe='15',
- datatype='1min',
- suffix='',
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=16
+
+ datatype: str
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created. Or list of filenames downloaded.
+
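+    Example
+    -------
+    A minimal usage sketch (it assumes GOES-16 EUVS data exist for this day):
+
+        import pyspedas
+        euvs_vars = pyspedas.goes.euvs(trange=['2023-01-30', '2023-01-31'], probe='16')
+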
+ """
+ return load(instrument='euvs', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def mag(trange=['2023-01-30', '2023-01-31'],
+ probe='16',
+ datatype='1min',
+ prefix='',
+ suffix='',
downloadonly=False,
no_update=False,
time_clip=False):
"""
- This function loads data from the GOES X-ray Sensor
-
+ This function loads data from the GOES Magnetometer, (probes 16-18)
+
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str/int or list of strs/ints
- GOES spacecraft #, e.g., probe=15
+ GOES spacecraft #, e.g., probe=16
datatype: str
- Data type; Valid options:
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created. Or list of filenames downloaded.
+
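+    Example
+    -------
+    A minimal usage sketch (it assumes GOES-16 magnetometer data exist for this day):
+
+        import pyspedas
+        mag_vars = pyspedas.goes.mag(trange=['2023-01-30', '2023-01-31'], probe='16')
+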
+ """
+ return load(instrument='mag', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def mpsh(trange=['2023-01-30', '2023-01-31'],
+ probe='16',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the GOES Magnetospheric Particle Sensor (MPS-HI), (probes 16-18)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=16
+
+ datatype: str
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created. Or list of filenames downloaded.
+
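+    Example
+    -------
+    A minimal usage sketch (it assumes GOES-16 MPS-HI data exist for this day):
+
+        import pyspedas
+        mpsh_vars = pyspedas.goes.mpsh(trange=['2023-01-30', '2023-01-31'], probe='16')
+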
+ """
+ return load(instrument='mpsh', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+
+
+def sgps(trange=['2023-01-30', '2023-01-31'],
+ probe='16',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the GOES Solar and Galactic Proton Sensor (SGPS), (probes 16-18)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=16
+
+ datatype: str
+ Data type; Default '1min'
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
suffix: str
- The tplot variable names will be given this suffix. By default,
- no suffix is added.
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
downloadonly: bool
- Set this flag to download the CDF files, but not load them into
+ Set this flag to download the CDF files, but not load them into
tplot variables
no_update: bool
@@ -304,7 +625,8 @@ def xrs(trange=['2013-11-5', '2013-11-6'],
Returns
----------
- List of tplot variables created.
+ List of tplot variables created. Or list of filenames downloaded.
"""
- return load(instrument='xrs', trange=trange, probe=probe, datatype=datatype, suffix=suffix, downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
+ return load(instrument='sgps', trange=trange, probe=probe, datatype=datatype, prefix=prefix, suffix=suffix,
+ downloadonly=downloadonly, time_clip=time_clip, no_update=no_update)
diff --git a/pyspedas/goes/config.py b/pyspedas/goes/config.py
index d3830bf6..bb185062 100644
--- a/pyspedas/goes/config.py
+++ b/pyspedas/goes/config.py
@@ -1,11 +1,12 @@
import os
CONFIG = {'local_data_dir': 'goes_data/',
- 'remote_data_dir': 'https://satdat.ngdc.noaa.gov/sem/goes/data/'}
+ 'remote_data_dir': 'https://www.ncei.noaa.gov/data/goes-space-environment-monitor/access/'}
# override local data directory with environment variables
if os.environ.get('SPEDAS_DATA_DIR'):
- CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'goes'])
+ CONFIG['local_data_dir'] = os.sep.join(
+ [os.environ['SPEDAS_DATA_DIR'], 'goes'])
if os.environ.get('GOES_DATA_DIR'):
- CONFIG['local_data_dir'] = os.environ['GOES_DATA_DIR']
\ No newline at end of file
+ CONFIG['local_data_dir'] = os.environ['GOES_DATA_DIR']
diff --git a/pyspedas/goes/load.py b/pyspedas/goes/load.py
index cf1203fb..13dd6020 100644
--- a/pyspedas/goes/load.py
+++ b/pyspedas/goes/load.py
@@ -1,20 +1,242 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import netcdf_to_tplot
from .config import CONFIG
-def load(trange=['2013-11-5', '2013-11-6'],
+
+def loadr(trange=['2023-01-01', '2023-01-02'],
+ probe='16',
+ instrument='mag',
+ datatype='1min',
+ prefix='',
+ suffix='',
+ downloadonly=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads GOES-R L2 data (GOES-16, GOES-17, GOES-18)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=16
+
+ instrument: str
+ name of the instrument (euvs, xrs, mag, mpsh, sgps)
+
+ datatype: str
+ Data resolution, default is '1min'
+ Valid options: low (avg), hi (full), and various other options depending on instrument
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example 'g16_'.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ -------
+ List of tplot variables created. Or list of filenames downloaded.
+
+ Notes
+ -----
+ Information: https://www.ngdc.noaa.gov/stp/satellite/goes-r.html
+ Data: https://data.ngdc.noaa.gov/platforms/solar-space-observing-satellites/goes/
+ Path: goesNN/l2/data/instrument/YYYY/MM/file.nc
+ Time variable: 'time', seconds since 2000-01-01 12:00:00
+
+ GOES-EAST (GOES-16, 2017-)
+ GOES-WEST (GOES-17, 2018-2022; GOES-18, 2023-)
+
+ Instruments: euvs (hi, full, 1min, euvs-l2-avg1m_science/2021/05/sci_euvs-l2-avg1m_g16_d20210530_v1-0-3.nc)
+ euvs (low, avg, 1day, euvs-l2-avg1d_science/2021/06/sci_euvs-l2-avg1d_g16_d20210630_v1-0-3.nc)
+ xrs (hi, full, 1sec, xrsf-l2-flx1s_science/2022/08/sci_xrsf-l2-flx1s_g16_d20220830_v2-1-0.nc)
+ xrs (low, avg, 1min, xrsf-l2-avg1m_science/2021/06/sci_xrsf-l2-avg1m_g16_d20210630_v2-1-0.nc)
+ mag (hi, full, 0.1sec, magn-l2-hires/2021/06/dn_magn-l2-hires_g16_d20210629_v1-0-1.nc)
+ mag (low, avg, 1min, magn-l2-avg1m/2022/12/dn_magn-l2-avg1m_g16_d20221230_v2-0-2.nc)
+ mpsh (hi, full, 1min, mpsh-l2-avg1m/2022/12/sci_mpsh-l2-avg1m_g16_d20221230_v2-0-0.nc)
+ mpsh (low, avg, 5min, mpsh-l2-avg5m/2022/12/sci_mpsh-l2-avg5m_g16_d20221230_v2-0-0.nc)
+ sgps (hi, full, 1min, sgps-l2-avg1m/2022/12/sci_sgps-l2-avg1m_g17_d20221230_v3-0-0.nc)
+ sgps (low, avg, 5min, sgps-l2-avg5m/2022/12/sci_sgps-l2-avg5m_g17_d20221230_v3-0-0.nc)
+
+ EXIS (Extreme Ultraviolet and X-ray Sensors), EUVS and XRS
+ EUVS: Spectral line irradiances, the Mg II index, and proxy spectra from the EXIS Extreme Ultraviolet Sensor (EUVS)
+ EUVS: Daily averages of spectral line irradiances, the Mg II index, and proxy spectra
+ XRS: 1-minute averages of XRS measurements
+ XRS: High cadence measurements from the EXIS X-Ray Sensor (XRS)
+ MAG (Magnetometer)
+ MAG: Full resolution magnetic field readings in different coordinate systems
+ MAG: Averages of 10 Hz magnetometer field readings
+ SEISS (Space Environment In Situ Suite): 1-min and 5-min averages for the Magnetospheric Particle Sensors (MPS-HI and MPS-LO)
+ and for the Solar and Galactic Proton Sensor (SGPS)
+
+ Wrappers:
+ pyspedas.goes.euvs
+ pyspedas.goes.xrs
+ pyspedas.goes.mag
+ pyspedas.goes.mpsh
+ pyspedas.goes.sgps
+
+ Example
+ -------
+ from pyspedas.goes import load
+ trange = ['2023-01-01', '2023-01-02']
+ load(trange=trange, probe='16', instrument='mag', datatype='1min', time_clip=True)
+
+ """
+ goes_path_dir = 'https://data.ngdc.noaa.gov/platforms/solar-space-observing-satellites/goes/'
+ time_var = 'time' # name of the time variable in the netcdf files
+ out_files = []
+ tvars = []
+
+ if not isinstance(probe, list):
+ probe = [probe]
+
+ for prb in probe:
+ remote_path = 'goes' + str(prb) + '/l2/data/'
+
+ if instrument == 'euvs':
+ if datatype in ['full', 'hi', '1min', 'avg1m']: # high resolution 1 min
+ pathformat = [remote_path + 'euvs-l2-avg1m_science/%Y/%m/sci_euvs-l2-avg1m_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ else: # low resolution 1 day, smaller files
+ pathformat = [remote_path + 'euvs-l2-avg1d_science/%Y/%m/sci_euvs-l2-avg1d_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ elif instrument == 'xrs':
+ if datatype in ['full', 'hi', '1sec', 'flx1s']: # high resolution 1 sec
+ pathformat = [remote_path + 'xrsf-l2-flx1s_science/%Y/%m/sci_xrsf-l2-flx1s_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ else: # low resolution 1 min, smaller files
+ pathformat = [remote_path + 'xrsf-l2-avg1m_science/%Y/%m/sci_xrsf-l2-avg1m_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ elif instrument == 'mag':
+ if datatype in ['full', 'hi', '0.1sec', 'hires']: # high resolution 0.1 sec
+ pathformat = [remote_path + 'magn-l2-hires/%Y/%m/dn_magn-l2-hires_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ else: # low resolution 1 min, smaller files
+ pathformat = [remote_path + 'magn-l2-avg1m/%Y/%m/dn_magn-l2-avg1m_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ elif instrument == 'mpsh':
+ time_var = 'L2_SciData_TimeStamp'
+ if datatype in ['full', 'hi', '1min', 'avg1m', '1m']: # high resolution 1 min
+ pathformat = [remote_path + 'mpsh-l2-avg1m/%Y/%m/sci_mpsh-l2-avg1m_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ else: # low resolution 5 min, smaller files
+ pathformat = [remote_path + 'mpsh-l2-avg5m/%Y/%m/sci_mpsh-l2-avg5m_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ elif instrument == 'sgps':
+ if datatype in ['full', 'hi', '1min', 'avg1m', '1m']: # high resolution 1 min
+ pathformat = [remote_path + 'sgps-l2-avg1m/%Y/%m/sci_sgps-l2-avg1m_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+ else: # low resolution 5 min, smaller files
+ pathformat = [remote_path + 'sgps-l2-avg5m/%Y/%m/sci_sgps-l2-avg5m_g' + str(prb) + '_d%Y%m%d_v?-?-?.nc']
+
+ # find the full remote path names using the trange
+ if not isinstance(pathformat, list):
+ pathformat = [pathformat]
+
+ remote_names = []
+ for path in pathformat:
+ remote_names.extend(dailynames(file_format=path, trange=trange))
+
+ files = download(remote_file=remote_names, remote_path=goes_path_dir, local_path=CONFIG['local_data_dir'], no_download=no_update)
+
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ tvars_local = []
+ if len(files) > 0 and downloadonly is False:
+ if prefix == 'probename':
+ prefix_local = 'g' + str(prb) + '_'
+ else:
+ prefix_local = prefix
+ tvars_local = netcdf_to_tplot(files, prefix=prefix_local, suffix=suffix, merge=True, time=time_var)
+
+ if len(tvars_local):
+ tvars.extend(tvars_local)
+
+ if downloadonly:
+ out_files = sorted(out_files)
+ return out_files
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
+
+
+def load(trange=['2013-11-05', '2013-11-06'],
probe='15',
instrument='fgm',
- datatype='1min',
- suffix='',
+ datatype='1min',
+ prefix='',
+ suffix='',
downloadonly=False,
no_update=False,
time_clip=False):
"""
- This function loads data from the GOES mission; this function is not meant
+ This function loads GOES L2 data
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=15
+
+ instrument: str
+ name of the instrument
+ (for GOES 8-15: fgm, eps, epead, maged, magpd, hepad, xrs)
+ (for GOES-R 16-18: euvs, xrs, mag, mpsh, sgps)
+
+ datatype: str
+        Data type, usually the instrument resolution; valid values depend on
+        the instrument. Default is '1min'
+ (valid for GOES 8-15: hi, low, full, avg, 1min, 5min)
+ (valid for GOES-R 16-18: hi, low, full, avg, and other options)
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+        If 'probename', the probe name will be used, for example g16.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ -------
+ List of tplot variables created. Or list of filenames downloaded.
+
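+    Example
+    -------
+    A minimal sketch using one of the wrappers listed in the Notes below:
+
+        import pyspedas
+        fgm_vars = pyspedas.goes.fgm(trange=['2013-11-5', '2013-11-6'], probe='15', datatype='1min')
+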
+ Notes
+ -----
+ This function loads data from the GOES mission; this function is not meant
to be called directly; instead, see the wrappers:
pyspedas.goes.fgm
pyspedas.goes.eps
@@ -29,115 +251,156 @@ def load(trange=['2013-11-5', '2013-11-6'],
if not isinstance(probe, list):
probe = [probe]
- fullavgpath = ['full', 'avg']
- goes_path_dir = fullavgpath[datatype == '1min' or datatype == '5min']
+ probe_r = [] # GOES-R probes
+ probe_s = [] # GOES probes
+ out_files_r = [] # GOES-R files to download
+ out_files = [] # GOES files to download
+ tvars_r = [] # GOES-R list of variables created
+ tvars = [] # all variables created
+ # Find if we need to call the GOES-R load function for some probes
for prb in probe:
- remote_path = goes_path_dir + '/%Y/%m/goes' + str(prb) + '/netcdf/'
+ if int(prb) > 15:
+ probe_r.append(str(prb))
+ else:
+ probe_s.append(str(prb))
+
+ if len(probe_r):
+ tvars_r = loadr(trange=trange,
+ probe=probe_r,
+ instrument=instrument,
+ datatype=datatype,
+ prefix=prefix,
+ suffix=suffix,
+ downloadonly=downloadonly,
+ no_update=no_update,
+ time_clip=time_clip)
+ if downloadonly:
+ out_files_r = tvars_r
+
+ # Continue with loading GOES (1-15) data
+ for prb in probe_s:
+ avg_path = 'avg/%Y/%m/goes' + str(prb) + '/netcdf/' + 'g' + str(prb)
+ full_path = 'full/%Y/%m/goes' + str(prb) + '/netcdf/' + 'g' + str(prb)
if instrument == 'fgm':
- if datatype == '512ms': # full, unaveraged data
- pathformat = remote_path + 'g' + str(prb) + '_magneto_512ms_%Y%m%d_%Y%m%d.nc'
- elif datatype == '1min': # 1 min averages
- pathformat = remote_path + 'g' + str(prb) + '_magneto_1m_%Y%m01_%Y%m??.nc'
- elif datatype == '5min': # 5 min averages
- pathformat = remote_path + 'g' + str(prb) + '_magneto_5m_%Y%m01_%Y%m??.nc'
+ if datatype == '512ms' or datatype == 'full': # full, unaveraged data
+ pathformat = full_path + '_magneto_512ms_%Y%m%d_%Y%m%d.nc'
+ elif datatype == '5min': # 5 min averages
+ pathformat = avg_path + '_magneto_5m_%Y%m01_%Y%m??.nc'
+ else: # 1 min averages, goes13, goes15 only contain 1m averages
+ pathformat = avg_path + '_magneto_1m_%Y%m01_%Y%m??.nc'
elif instrument == 'eps':
# energetic particle sensor -- only valid for GOES-08 through GOES-12, only averaged data available
- if datatype == '1min':
- pathformat = remote_path + 'g' + str(prb) + '_eps_1m_%Y%m01_%Y%m??.nc'
- else:
- pathformat = remote_path + 'g' + str(prb) + '_eps_5m_%Y%m01_%Y%m??.nc'
+ if datatype == '1min' or datatype == 'full':
+ pathformat = avg_path + '_eps_1m_%Y%m01_%Y%m??.nc'
+ else: # 'low' or 5min
+ pathformat = avg_path + '_eps_5m_%Y%m01_%Y%m??.nc'
elif instrument == 'epead':
# electron, proton, alpha detector -- only valid on GOES-13, 14, 15
if datatype == '1min':
- pathformat = [remote_path + 'g' + str(prb) + '_epead_e13ew_1m_%Y%m01_%Y%m??.nc',
- '_epead_p17ew_1m_%Y%m01_%Y%m??.c',
- '_epead_a16ew_1m_%Y%m01_%Y%m??.nc']
- elif datatype == '5min':
- pathformat = [remote_path + 'g' + str(prb) + '_epead_e13ew_5m_%Y%m01_%Y%m??.nc',
- '_epead_p17ew_5m_%Y%m01_%Y%m??.c',
- '_epead_a16ew_5m_%Y%m01_%Y%m??.nc']
- else:
- pathformat = [remote_path + 'g' + str(prb) + '_epead_e1ew_4s_%Y%m%d_%Y%m%d.nc',
- '_epead_e2ew_16s_%Y%m%d_%Y%m%d.nc',
- '_epead_e3ew_16s_%Y%m%d_%Y%m%d.nc',
- '_epead_p1ew_8s_%Y%m%d_%Y%m%d.nc',
- '_epead_p27e_32s_%Y%m%d_%Y%m%d.nc',
- '_epead_p27w_32s_%Y%m%d_%Y%m%d.nc',
- '_epead_a16e_32s_%Y%m%d_%Y%m%d.nc',
- '_epead_a16w_32s_%Y%m%d_%Y%m%d.nc']
+ pathformat = ['_epead_e13ew_1m_%Y%m01_%Y%m??.nc',
+                          '_epead_p17ew_1m_%Y%m01_%Y%m??.nc',
+ '_epead_a16ew_1m_%Y%m01_%Y%m??.nc']
+ pathformat = [avg_path + s for s in pathformat]
+ elif datatype == '5min' or datatype == 'low':
+ pathformat = ['_epead_e13ew_5m_%Y%m01_%Y%m??.nc',
+                          '_epead_p17ew_5m_%Y%m01_%Y%m??.nc',
+ '_epead_a16ew_5m_%Y%m01_%Y%m??.nc']
+ pathformat = [avg_path + s for s in pathformat]
+ else: # full
+ pathformat = ['_epead_a16e_32s_%Y%m%d_%Y%m%d.nc',
+ '_epead_a16w_32s_%Y%m%d_%Y%m%d.nc',
+ '_epead_e1ew_4s_%Y%m%d_%Y%m%d.nc',
+ '_epead_e2ew_16s_%Y%m%d_%Y%m%d.nc',
+ '_epead_e3ew_16s_%Y%m%d_%Y%m%d.nc',
+ '_epead_p1ew_8s_%Y%m%d_%Y%m%d.nc',
+ '_epead_p27e_32s_%Y%m%d_%Y%m%d.nc',
+ '_epead_p27w_32s_%Y%m%d_%Y%m%d.nc']
+ pathformat = [full_path + s for s in pathformat]
elif instrument == 'maged':
# magnetospheric electron detector -- only valid on GOES 13, 14, 15
if datatype == '1min':
- pathformat = remote_path + 'g' + str(prb) + '_maged_19me15_1m_%Y%m01_%Y%m??.nc'
- elif datatype == '5min':
- pathformat = remote_path + 'g' + str(prb) + '_maged_19me15_5m_%Y%m01_%Y%m??.nc'
- else:
- channels = ['me1','me2','me3','me4','me5']
- resolution = ['2','2','4','16','32']
+ pathformat = avg_path + '_maged_19me15_1m_%Y%m01_%Y%m??.nc'
+ elif datatype == '5min' or datatype == 'low':
+ pathformat = avg_path + '_maged_19me15_5m_%Y%m01_%Y%m??.nc'
+ else: # full
+ channels = ['me1', 'me2', 'me3', 'me4', 'me5']
+ resolution = ['2', '2', '4', '16', '32']
pathformat = []
for idx, channel in enumerate(channels):
- pathformat.append(remote_path + 'g' + str(prb) + '_maged_19'+channel+'_'+resolution[idx]+'s_%Y%m%d_%Y%m%d.nc')
+ pathformat.append('_maged_19' + channel + '_' + resolution[idx] + 's_%Y%m%d_%Y%m%d.nc')
+ pathformat = [full_path + s for s in pathformat]
elif instrument == 'magpd':
# magnetospheric proton detector -- only valid on GOES 13, 14, 15
- if datatype == '1min':
- pathformat = remote_path + 'g' + str(prb) + '_magpd_19mp15_1m_%Y%m01_%Y%m??.nc'
- elif datatype == '5min':
- pathformat = remote_path + 'g' + str(prb) + '_magpd_19mp15_5m_%Y%m01_%Y%m??.nc'
- else:
- channels = ['mp1','mp2','mp3','mp4','mp5']
- resolution = ['16','16','16','32','32']
+ if datatype == '1min' or datatype == 'low':
+ pathformat = avg_path + '_magpd_19mp15_1m_%Y%m01_%Y%m??.nc'
+ else: # full
+ channels = ['mp1', 'mp2', 'mp3', 'mp4', 'mp5']
+ resolution = ['16', '16', '16', '32', '32']
pathformat = []
for idx, channel in enumerate(channels):
- pathformat.append(remote_path + 'g' + str(prb) + '_magpd_19'+channel+'_'+resolution[idx]+'s_%Y%m%d_%Y%m%d.nc')
+ pathformat.append('_magpd_19' + channel + '_' + resolution[idx] + 's_%Y%m%d_%Y%m%d.nc')
+ pathformat = [full_path + s for s in pathformat]
elif instrument == 'hepad':
# high energy proton and alpha detector -- valid for GOES 08-15
if datatype == '1min':
- pathformat = [remote_path + 'g' + str(prb) + '_hepad_ap_1m_%Y%m01_%Y%m??.nc',
- '_hepad_s15_1m_%Y%m01_%Y%m??.nc']
- elif datatype == '5min':
- pathformat = [remote_path + 'g' + str(prb) + '_hepad_ap_5m_%Y%m01_%Y%m??.nc',
- '_hepad_s15_5m_%Y%m01_%Y%m??.nc']
+ pathformat = ['_hepad_ap_1m_%Y%m01_%Y%m??.nc',
+ '_hepad_s15_1m_%Y%m01_%Y%m??.nc']
+ pathformat = [avg_path + s for s in pathformat]
+ elif datatype == '5min' or datatype == 'low':
+ pathformat = ['_hepad_ap_5m_%Y%m01_%Y%m??.nc',
+ '_hepad_s15_5m_%Y%m01_%Y%m??.nc']
+ pathformat = [avg_path + s for s in pathformat]
else:
- pathformat = [remote_path + 'g' + str(prb) + '_hepad_ap_32s_%Y%m%d_%Y%m%d.nc',
- '_hepad_s15_4s_%Y%m%d_%Y%m%d.nc']
+ pathformat = ['_hepad_ap_32s_%Y%m%d_%Y%m%d.nc',
+ '_hepad_s15_4s_%Y%m%d_%Y%m%d.nc']
+ pathformat = [full_path + s for s in pathformat]
elif instrument == 'xrs':
# x-ray sensor -- valid for GOES 08-15
if datatype == '1min':
- pathformat = remote_path + 'g' + str(prb) + '_xrs_1m_%Y%m01_%Y%m??.nc'
- elif datatype == '5min':
- pathformat = remote_path + 'g' + str(prb) + '_xrs_5m_%Y%m01_%Y%m??.nc'
+ pathformat = avg_path + '_xrs_1m_%Y%m01_%Y%m??.nc'
+ elif datatype == '5min' or datatype == 'low':
+ pathformat = avg_path + '_xrs_5m_%Y%m01_%Y%m??.nc'
else:
- pathformat = remote_path + 'g' + str(prb) + '_xrs_2s_%Y%m%d_%Y%m%d.nc'
+ pathformat = ['_xrs_2s_%Y%m%d_%Y%m%d.nc',
+ '_xrs_3s_%Y%m%d_%Y%m%d.nc']
+ pathformat = [full_path + s for s in pathformat]
# find the full remote path names using the trange
- if isinstance(pathformat, list):
- remote_names = []
- for path in pathformat:
- remote_names.extend(dailynames(file_format=path, trange=trange))
- else:
- remote_names = dailynames(file_format=pathformat, trange=trange)
+ if not isinstance(pathformat, list):
+ pathformat = [pathformat]
- out_files = []
+ remote_names = []
+ for path in pathformat:
+ remote_names.extend(dailynames(file_format=path, trange=trange))
files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
if files is not None:
for file in files:
out_files.append(file)
- out_files = sorted(out_files)
+ tvars_local = []
+ if files is not None and len(files) > 0 and not downloadonly:
+ if prefix == 'probename':
+ prefix_local = 'g' + str(prb) + '_'
+ else:
+ prefix_local = prefix
+ tvars_local = netcdf_to_tplot(files, prefix=prefix_local, suffix=suffix, merge=True, time='time_tag')
+
+ if len(tvars_local) > 0:
+ tvars.extend(tvars_local)
if downloadonly:
+ out_files.extend(out_files_r) # append GOES-R filenames
+ out_files = sorted(out_files)
return out_files
- tvars = netcdf_to_tplot(out_files, suffix=suffix, merge=True, time='time_tag')
-
- if tvars is None:
- return
-
if time_clip:
for new_var in tvars:
tclip(new_var, trange[0], trange[1], suffix='')
+ if len(tvars_r) > 0:
+ tvars.extend(tvars_r) # append GOES-R variables
+
return tvars
diff --git a/pyspedas/goes/load_orbit.py b/pyspedas/goes/load_orbit.py
new file mode 100644
index 00000000..fa8f9e68
--- /dev/null
+++ b/pyspedas/goes/load_orbit.py
@@ -0,0 +1,107 @@
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+
+def load_orbit(trange=['2013-11-5', '2013-11-6'],
+ probe='15',
+ prefix='',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=True):
+ """
+ This function loads GOES orbit data from SPDF:
+
+ https://spdf.gsfc.nasa.gov/pub/data/goes/goes#/orbit/YYYY/
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str/int or list of strs/ints
+ GOES spacecraft #, e.g., probe=15
+
+ prefix: str
+ The tplot variable names will be given this prefix.
+ By default, no prefix is added.
+ If 'probename', the probe name is used as the prefix, for example 'g16_'.
+
+ suffix: str
+ The tplot variable names will be given this suffix.
+ By default, no suffix is added.
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ If set, load the data into dictionaries containing the numpy objects instead
+ of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created. Or list of filenames downloaded.
+
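+ Examples
+ ----------
+ A minimal usage sketch (requires network access to SPDF; the variable
+ names below are the ones checked in this module's tests):
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> orbit_vars = pyspedas.goes.orbit(trange=['2013-11-5', '2013-11-6'], probe='15')
+ >>> tplot(['XYZ_GSM', 'XYZ_GSE'])
+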
+ """
+ remote_data_dir = 'https://spdf.gsfc.nasa.gov/pub/data/goes/'
+ out_files = [] # list of local files downloaded
+ tvars = [] # list of tplot variables created
+
+ if not isinstance(probe, list):
+ probe = [probe]
+
+ for prb in probe:
+
+ # yearly files
+ pathformat = 'goes' + str(prb) + '/orbit/%Y/goes' + str(prb) + '_ephemeris_ssc_%Y0101_v??.cdf'
+
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange)
+
+ files = download(remote_file=remote_names, remote_path=remote_data_dir,
+ local_path=CONFIG['local_data_dir'], no_download=no_update)
+
+ out_files_local = []
+
+ if files is not None:
+ for file in files:
+ out_files_local.append(file)
+
+ out_files.extend(out_files_local)
+
+ tvars_local = []
+ if not downloadonly:
+ if prefix == 'probename':
+ prefix_local = 'g' + str(prb) + '_'
+ else:
+ prefix_local = prefix
+
+ tvars_local = cdf_to_tplot(out_files_local, prefix=prefix_local, suffix=suffix, get_support_data=get_support_data,
+ varformat=varformat, varnames=varnames, notplot=notplot)
+ tvars.extend(tvars_local)
+
+ if time_clip:
+ for new_var in tvars_local:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ if downloadonly:
+ return out_files
+
+ return tvars
diff --git a/pyspedas/goes/tests/tests.py b/pyspedas/goes/tests/tests.py
index 458210bb..0b7323e1 100644
--- a/pyspedas/goes/tests/tests.py
+++ b/pyspedas/goes/tests/tests.py
@@ -1,26 +1,187 @@
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
import pyspedas
+from pytplot import del_data
+
class LoadTestCases(unittest.TestCase):
+
def test_downloadonly(self):
+ del_data()
mag_files = pyspedas.goes.fgm(datatype='1min', downloadonly=True)
self.assertTrue(os.path.exists(mag_files[0]))
+ def test_load_orbit_data(self):
+ del_data()
+ orbit_vars = pyspedas.goes.orbit(downloadonly=True)
+ orbit_vars = pyspedas.goes.orbit(notplot=True)
+ orbit_vars = pyspedas.goes.orbit()
+ self.assertTrue(data_exists('XYZ_GSM'))
+ self.assertTrue(data_exists('XYZ_GSE'))
+ self.assertTrue(data_exists('XYZ_SM'))
+ self.assertTrue(data_exists('XYZ_GEO'))
+
def test_load_1min_mag_data(self):
+ del_data()
mag_vars = pyspedas.goes.fgm(datatype='1min')
self.assertTrue(data_exists('BX_1'))
self.assertTrue(data_exists('BY_1'))
self.assertTrue(data_exists('BZ_1'))
- def test_load_mag_data(self):
+ def test_load_5min_mag_data(self):
+ del_data()
+ mag_vars = pyspedas.goes.fgm(datatype='5min', probe='10', trange=['2000-07-01', '2000-07-02'], time_clip=True)
+ self.assertTrue(data_exists('ht'))
+
+ def test_load_full_mag_data(self):
+ del_data()
mag_vars = pyspedas.goes.fgm(datatype='512ms', suffix='_512')
self.assertTrue(data_exists('BX_1_512'))
self.assertTrue(data_exists('BY_1_512'))
self.assertTrue(data_exists('BZ_1_512'))
+ def test_load_1min_epead_data(self):
+ del_data()
+ epead_vars = pyspedas.goes.epead()
+ self.assertTrue(data_exists('E1E_UNCOR_FLUX'))
+ self.assertTrue(data_exists('E1W_UNCOR_FLUX'))
+ self.assertTrue(data_exists('E2E_UNCOR_FLUX'))
+
+ def test_load_full_epead_data(self):
+ del_data()
+ epead_vars = pyspedas.goes.epead(datatype='full')
+ self.assertTrue(data_exists('E1E_UNCOR_FLUX'))
+ self.assertTrue(data_exists('E1W_UNCOR_FLUX'))
+ self.assertTrue(data_exists('E2E_UNCOR_FLUX'))
+
+ def test_load_5min_epead_data(self):
+ del_data()
+ epead_vars = pyspedas.goes.epead(datatype='5min')
+ self.assertTrue(data_exists('E1E_UNCOR_FLUX'))
+ self.assertTrue(data_exists('E1W_UNCOR_FLUX'))
+ self.assertTrue(data_exists('E2E_UNCOR_FLUX'))
+
+ def test_load_full_maged_data(self):
+ del_data()
+ maged_vars = pyspedas.goes.maged(datatype='full')
+ self.assertTrue(data_exists('M_1ME1_DTC_UNCOR_CR'))
+
+ def test_load_1min_maged_data(self):
+ del_data()
+ maged_vars = pyspedas.goes.maged(datatype='1min', time_clip=True)
+ self.assertTrue(data_exists('M_1ME1_DTC_UNCOR_FLUX'))
+
+ def test_load_5min_maged_data(self):
+ del_data()
+ maged_vars = pyspedas.goes.maged(datatype='5min', time_clip=True)
+ self.assertTrue(data_exists('M_2ME1_DTC_COR_FLUX'))
+
+ def test_load_full_magpd_data(self):
+ del_data()
+ magpd_vars = pyspedas.goes.magpd(datatype='full')
+ self.assertTrue(data_exists('M_1MP1_DTC_UNCOR_CR'))
+
+ def test_load_1min_magpd_data(self):
+ del_data()
+ magpd_vars = pyspedas.goes.magpd(datatype='1min', time_clip=True)
+ self.assertTrue(data_exists('M_1MP1_DTC_UNCOR_FLUX'))
+
+ def test_load_full_hepad_data(self):
+ del_data()
+ hepad_vars = pyspedas.goes.hepad(datatype='full')
+ self.assertTrue(data_exists('P10_FLUX'))
+
+ def test_load_1min_hepad_data(self):
+ del_data()
+ hepad_vars = pyspedas.goes.hepad(prefix='probename', time_clip=True)
+ self.assertTrue(data_exists('g15_P10_FLUX'))
+
+ def test_load_xrs_data(self):
+ del_data()
+ xrs_vars = pyspedas.goes.xrs(probe='10', datatype='full', trange=['2002-08-01', '2002-08-01'])
+ self.assertTrue(data_exists('xl'))
+
+ def test_load_xrs_5m_data(self):
+ del_data()
+ xrs_vars = pyspedas.goes.xrs(probe='11', datatype='5min', trange=['2000-09-01', '2000-09-01'], time_clip=True)
+ self.assertTrue(data_exists('xl'))
+
+ def test_load_xrs_1m_data(self):
+ del_data()
+ xrs_vars = pyspedas.goes.xrs(probe='11', datatype='1min', trange=['2000-09-01', '2000-09-01'], prefix='probename', time_clip=True)
+ self.assertTrue(data_exists('g11_xs'))
+
+ def test_load_eps_1m_data(self):
+ del_data()
+ eps_vars = pyspedas.goes.eps(trange=['2000-09-01', '2000-09-01'], probe='11', time_clip=True)
+ self.assertTrue(data_exists('e1_flux_i'))
+ self.assertTrue(data_exists('e2_flux_i'))
+ self.assertTrue(data_exists('e3_flux_i'))
+
+ def test_load_eps_5m_data(self):
+ del_data()
+ eps_vars = pyspedas.goes.eps(trange=['2000-09-01', '2000-09-01'], probe='11', datatype='5min', time_clip=True)
+ self.assertTrue(data_exists('p1_flux'))
+ self.assertTrue(data_exists('p2_flux'))
+ self.assertTrue(data_exists('p3_flux'))
+ self.assertTrue(data_exists('p4_flux'))
+ self.assertTrue(data_exists('p5_flux'))
+ self.assertTrue(data_exists('p6_flux'))
+ self.assertTrue(data_exists('p7_flux'))
+
+ def test_load_xrs_data_16(self):
+ del_data()
+ xrs_vars_16 = pyspedas.goes.xrs(probe='16', trange=['2022-09-01', '2022-09-02'])
+ self.assertTrue('xrsa_flux' in xrs_vars_16)
+
+ def test_load_xrs_data_17_hi(self):
+ del_data()
+ xrs_vars_17 = pyspedas.goes.xrs(probe='17', trange=['2022-07-01', '2022-07-02'], datatype='hi')
+ self.assertTrue('xrsb_flux' in xrs_vars_17)
+
+ def test_load_euvs_data_17(self):
+ del_data()
+ euvs_vars_17 = pyspedas.goes.euvs(probe='17', trange=['2022-09-01', '2022-09-02'], prefix='probename')
+ self.assertTrue('g17_irr_256' in euvs_vars_17)
+
+ def test_load_euvs_data_16_hi(self):
+ del_data()
+ euvs_vars_16 = pyspedas.goes.euvs(probe='16', trange=['2022-08-01', '2022-08-02'], prefix='probename', datatype='hi')
+ self.assertTrue('g16_irr_256' in euvs_vars_16)
+
+ def test_load_mag_data_16(self):
+ del_data()
+ mag_vars_16 = pyspedas.goes.mag(probe='16', trange=['2023-01-30', '2023-01-31'], prefix='goes16_')
+ self.assertTrue('goes16_b_total' in mag_vars_16)
+
+ def test_load_mag_data_17(self):
+ del_data()
+ mag_vars_17 = pyspedas.goes.mag(probe='17', trange=['2022-01-30', '2022-01-31'], prefix='goes17_', datatype='hi')
+ self.assertTrue('goes17_b_gse' in mag_vars_17)
+
+ def test_load_mpsh_data_17(self):
+ del_data()
+ mpsh_vars_17 = pyspedas.goes.mpsh(probe='17', trange=['2022-09-01', '2022-09-02'], prefix='probename', time_clip=True)
+ self.assertTrue('g17_AvgDiffElectronFlux' in mpsh_vars_17)
+
+ def test_load_mpsh_data_16(self):
+ del_data()
+ mpsh_vars_16 = pyspedas.goes.mpsh(probe='16', trange=['2022-10-01', '2022-10-02'], prefix='probename', datatype='hi')
+ self.assertTrue('g16_AvgDiffProtonFlux' in mpsh_vars_16)
+
+ def test_load_sgps_data_16(self):
+ del_data()
+ sgps_vars_16 = pyspedas.goes.sgps(probe='16', trange=['2023-01-30', '2023-01-31'], prefix='probename')
+ self.assertTrue('g16_AvgDiffAlphaFlux' in sgps_vars_16)
+
+ def test_load_sgps_data_18(self):
+ del_data()
+ sgps_vars_18 = pyspedas.goes.sgps(probe='18', trange=['2023-01-30', '2023-01-31'], prefix='probename', datatype='hi', time_clip=True)
+ self.assertTrue('g18_AvgIntProtonFlux' in sgps_vars_18)
+
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/hapi/hapi.py b/pyspedas/hapi/hapi.py
index 055807d7..1138a463 100644
--- a/pyspedas/hapi/hapi.py
+++ b/pyspedas/hapi/hapi.py
@@ -1,5 +1,5 @@
+import logging
import warnings
-from time import sleep
from pyspedas import time_double
from pytplot import get_data, store_data, options
import numpy as np
@@ -7,7 +7,8 @@
try:
from hapiclient import hapi as load_hapi
except ImportError:
- print('hapiclient not found; install with: "pip install hapiclient"')
+ logging.error('hapiclient not found; install with: "pip install hapiclient"')
+
def hapi(trange=None, server=None, dataset=None, parameters='', suffix='',
prefix='', catalog=False):
@@ -44,12 +45,12 @@ def hapi(trange=None, server=None, dataset=None, parameters='', suffix='',
"""
if server is None:
- print('Error, no server specified; example servers include:')
- print('- https://cdaweb.gsfc.nasa.gov/hapi')
- print('- https://pds-ppi.igpp.ucla.edu/hapi')
- print('- http://planet.physics.uiowa.edu/das/das2Server/hapi')
- print('- https://iswa.gsfc.nasa.gov/IswaSystemWebApp/hapi')
- print('- http://lasp.colorado.edu/lisird/hapi')
+ logging.error('No server specified; example servers include:')
+ logging.error('- https://cdaweb.gsfc.nasa.gov/hapi')
+ logging.error('- https://pds-ppi.igpp.ucla.edu/hapi')
+ logging.error('- http://planet.physics.uiowa.edu/das/das2Server/hapi')
+ logging.error('- https://iswa.gsfc.nasa.gov/IswaSystemWebApp/hapi')
+ logging.error('- http://lasp.colorado.edu/lisird/hapi')
return
if catalog:
@@ -57,20 +58,20 @@ def hapi(trange=None, server=None, dataset=None, parameters='', suffix='',
items = []
if 'catalog' in catalog.keys():
items = catalog['catalog']
- print('Available datasets: ')
+ logging.info('Available datasets: ')
for item in items:
if 'title' in item.keys():
- print(item['id'] + ': ' + item['title'])
+ logging.info(item['id'] + ': ' + item['title'])
else:
- print(item['id'])
+ logging.info(item['id'])
return
if dataset is None:
- print('Error, no dataset specified; please see the catalog for a list of available data sets.')
+ logging.error('Error, no dataset specified; please see the catalog for a list of available data sets.')
return
if trange is None:
- print('Error, no trange specified')
+ logging.error('No trange specified')
return
if isinstance(parameters, list):
@@ -170,4 +171,4 @@ def hapi(trange=None, server=None, dataset=None, parameters='', suffix='',
if saved:
out_vars.append(prefix + param_name + suffix)
- return out_vars
\ No newline at end of file
+ return out_vars
diff --git a/pyspedas/hapi/tests/__init__.py b/pyspedas/hapi/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/hapi/tests/tests.py b/pyspedas/hapi/tests/tests.py
new file mode 100644
index 00000000..277614c7
--- /dev/null
+++ b/pyspedas/hapi/tests/tests.py
@@ -0,0 +1,38 @@
+import unittest
+from pyspedas.hapi.hapi import hapi
+from pytplot import data_exists
+
+
+class HAPITests(unittest.TestCase):
+ def test_print_servers(self):
+ hapi(trange=['2003-10-20', '2003-11-30'])
+
+ def test_print_catalog(self):
+ hapi(server='https://cdaweb.gsfc.nasa.gov/hapi', catalog=True)
+
+ def test_dataset_not_specified(self):
+ # dataset not specified
+ h_vars = hapi(trange=['2003-10-20', '2003-11-30'],
+ server='https://cdaweb.gsfc.nasa.gov/hapi')
+
+ def test_trange_not_specified(self):
+ # trange not specified
+ h_vars = hapi(dataset='OMNI_HRO2_1MIN',
+ server='https://cdaweb.gsfc.nasa.gov/hapi')
+
+ def test_cdaweb_mms_spec(self):
+ h_vars = hapi(trange=['2019-10-16', '2019-10-17'],
+ server='https://cdaweb.gsfc.nasa.gov/hapi',
+ dataset='MMS4_EDP_SRVY_L2_HFESP')
+
+ def test_cdaweb_omni(self):
+ h_vars = hapi(trange=['2003-10-20', '2003-11-30'],
+ server='https://cdaweb.gsfc.nasa.gov/hapi',
+ dataset='OMNI_HRO2_1MIN')
+ self.assertTrue(data_exists('BX_GSE'))
+ self.assertTrue(data_exists('BY_GSE'))
+ self.assertTrue(data_exists('BZ_GSE'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/image/__init__.py b/pyspedas/image/__init__.py
index 40e0c2ad..35131cac 100644
--- a/pyspedas/image/__init__.py
+++ b/pyspedas/image/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def lena(trange=['2004-11-5', '2004-11-6'],
datatype='k0',
@@ -420,3 +421,7 @@ def orbit(trange=['2004-11-5', '2004-11-6'],
"""
return load(instrument='orbit', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='IMAGE', instrument=instrument, label=label)
diff --git a/pyspedas/image/load.py b/pyspedas/image/load.py
index 91a531a8..56765342 100644
--- a/pyspedas/image/load.py
+++ b/pyspedas/image/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/image/tests/tests.py b/pyspedas/image/tests/tests.py
index eddb2e85..9c102a3c 100644
--- a/pyspedas/image/tests/tests.py
+++ b/pyspedas/image/tests/tests.py
@@ -1,11 +1,10 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
from pytplot import del_data
+
class LoadTestCases(unittest.TestCase):
def tearDown(self):
del_data('*')
@@ -14,6 +13,10 @@ def test_downloadonly(self):
files = pyspedas.image.mena(downloadonly=True)
self.assertTrue(os.path.exists(files[0]))
+ def test_load_notplot(self):
+ mena_vars = pyspedas.image.mena(notplot=True)
+ self.assertTrue('Image0' in mena_vars)
+
def test_load_lena_data(self):
lena_vars = pyspedas.image.lena(time_clip=True)
self.assertTrue(data_exists('Image0'))
@@ -48,5 +51,6 @@ def test_load_preorb_data(self):
self.assertTrue(data_exists('GSM_POS'))
self.assertTrue(data_exists('GSM_VEL'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/kyoto/load_dst.py b/pyspedas/kyoto/load_dst.py
index 91c89520..f9a835fc 100644
--- a/pyspedas/kyoto/load_dst.py
+++ b/pyspedas/kyoto/load_dst.py
@@ -1,10 +1,11 @@
-
+import logging
import requests
from pyspedas import time_double
from pyspedas import time_clip as tclip
from pyspedas.utilities.dailynames import dailynames
from pytplot import store_data, options
+
def dst(trange=None, time_clip=True, remote_data_dir='http://wdc.kugi.kyoto-u.ac.jp/', suffix=''):
"""
Loads Dst data from the Kyoto servers.
@@ -37,14 +38,10 @@ def dst(trange=None, time_clip=True, remote_data_dir='http://wdc.kugi.kyoto-u.ac
"""
if trange is None:
- print('trange keyword required to download data.')
+ logging.error('trange keyword required to download data.')
return
file_names = dailynames(file_format='%Y%m/index.html', trange=trange)
-
- if file_names is None:
- print('No files found for this trange.')
- return
times = []
data = []
@@ -78,7 +75,7 @@ def dst(trange=None, time_clip=True, remote_data_dir='http://wdc.kugi.kyoto-u.ac
datatype = 'Real Time'
if len(times) == 0:
- print('No data found.')
+ logging.error('No data found.')
return
store_data('kyoto_dst'+suffix, data={'x': times, 'y': data})
@@ -88,16 +85,17 @@ def dst(trange=None, time_clip=True, remote_data_dir='http://wdc.kugi.kyoto-u.ac
options('kyoto_dst'+suffix, 'ytitle', 'Dst (' + datatype + ')')
- print('**************************************************************************************')
- print('The DST data are provided by the World Data Center for Geomagnetism, Kyoto, and')
- print(' are not for redistribution (http://wdc.kugi.kyoto-u.ac.jp/). Furthermore, we thank')
- print(' the geomagnetic observatories (Kakioka [JMA], Honolulu and San Juan [USGS], Hermanus')
- print(' [RSA], Alibag [IIG]), NiCT, INTERMAGNET, and many others for their cooperation to')
- print(' make the Dst index available.')
- print('**************************************************************************************')
+ logging.info('**************************************************************************************')
+ logging.info('The DST data are provided by the World Data Center for Geomagnetism, Kyoto, and')
+ logging.info(' are not for redistribution (http://wdc.kugi.kyoto-u.ac.jp/). Furthermore, we thank')
+ logging.info(' the geomagnetic observatories (Kakioka [JMA], Honolulu and San Juan [USGS], Hermanus')
+ logging.info(' [RSA], Alibag [IIG]), NiCT, INTERMAGNET, and many others for their cooperation to')
+ logging.info(' make the Dst index available.')
+ logging.info('**************************************************************************************')
return 'kyoto_dst'+suffix
+
def parse_html(html_text, year=None, month=None):
times = []
data = []
diff --git a/pyspedas/kyoto/tests/tests.py b/pyspedas/kyoto/tests/tests.py
index ec67f534..e6ebcff2 100644
--- a/pyspedas/kyoto/tests/tests.py
+++ b/pyspedas/kyoto/tests/tests.py
@@ -1,13 +1,24 @@
-
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_load_dst_data(self):
+ # final
dst_vars = pyspedas.kyoto.dst(trange=['2015-10-15', '2015-10-16'])
self.assertTrue(data_exists('kyoto_dst'))
+ # provisional
+ dst_vars = pyspedas.kyoto.dst(trange=['2019-10-15', '2019-10-16'])
+ self.assertTrue(data_exists('kyoto_dst'))
+ # real time
+ dst_vars = pyspedas.kyoto.dst(trange=['2022-7-15', '2022-7-16'])
+ self.assertTrue(data_exists('kyoto_dst'))
+
+ def test_errors(self):
+ pyspedas.kyoto.dst(trange=None)
+ pyspedas.kyoto.dst(trange=['1015-10-15', '1015-10-16'])
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/lanl/README.md b/pyspedas/lanl/README.md
new file mode 100644
index 00000000..6d2b98e6
--- /dev/null
+++ b/pyspedas/lanl/README.md
@@ -0,0 +1,35 @@
+
+## LANL
+The routines in this module can be used to load data from the LANL geosynchronous satellites.
+
+### Instruments
+- Magnetospheric Plasma Analyzer (MPA)
+- Synchronous Orbit Particle Analyzer (SPA)
+
+### Examples
+Get started by importing pyspedas and tplot; these are required to load and plot the data:
+
+```python
+import pyspedas
+from pytplot import tplot
+```
+
+#### Magnetospheric Plasma Analyzer (MPA)
+
+```python
+mpa_vars = pyspedas.lanl.mpa(trange=['2004-10-31', '2004-11-01'])
+
+tplot(['dens_lop', 'vel_lop'])
+```
+
+
+#### Synchronous Orbit Particle Analyzer (SPA)
+
+```python
+spa_vars = pyspedas.lanl.spa(trange=['2004-10-31', '2004-11-01'])
+
+tplot(['spa_p_temp', 'spa_e_temp'])
+```
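+
+#### Finding datasets
+
+To list the LANL datasets available from SPDF (a thin wrapper around
+`find_datasets`; the output depends on the remote index):
+
+```python
+pyspedas.lanl.datasets()
+```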
+
+
+
\ No newline at end of file
diff --git a/pyspedas/lanl/__init__.py b/pyspedas/lanl/__init__.py
new file mode 100644
index 00000000..c4b98e24
--- /dev/null
+++ b/pyspedas/lanl/__init__.py
@@ -0,0 +1,174 @@
+from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
+
+def mpa(trange=['2004-10-31', '2004-11-01'],
+ probe='l1',
+ level='k0',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Magnetospheric Plasma Analyzer (MPA)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str
+ LANL probe #; Valid options:
+ 'l0' for LANL 1990 data
+ 'l1' for LANL 1991 data
+ 'l4' for LANL 1994 data
+ 'l7' for LANL 1997 data
+ 'l9' for LANL 1989 data
+ 'a1' for LANL 2001 data
+ 'a2' for LANL 2002 data
+
+ level: str
+ Data level; options: 'k0' (default: k0)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
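+ Examples
+ ----------
+ A minimal usage sketch (requires network access to SPDF; the variable
+ names follow this module's README and tests):
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> mpa_vars = pyspedas.lanl.mpa(trange=['2004-10-31', '2004-11-01'])
+ >>> tplot(['dens_lop', 'vel_lop'])
+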
+ """
+ tvars = load(instrument='mpa', trange=trange, level=level, probe=probe, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return mpa_postprocessing(tvars)
+
+
+def mpa_postprocessing(variables):
+ """
+ Placeholder for MPA post-processing
+ """
+ return variables
+
+
+def spa(trange=['2004-10-31', '2004-11-01'],
+ probe='l1',
+ level='k0',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Synchronous Orbit Particle Analyzer (SPA)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str
+ LANL S/C probe #; Valid options:
+ 'l0' for LANL 1990 data
+ 'l1' for LANL 1991 data
+ 'l4' for LANL 1994 data
+ 'l7' for LANL 1997 data
+ 'l9' for LANL 1989 data
+ 'a1' for LANL 2001 data
+ 'a2' for LANL 2002 data
+
+ level: str
+ Data level; options: 'k0' (default: k0)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
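+ Examples
+ ----------
+ A minimal usage sketch (requires network access to SPDF; the variable
+ names follow this module's README and tests):
+
+ >>> import pyspedas
+ >>> from pytplot import tplot
+ >>> spa_vars = pyspedas.lanl.spa(trange=['2004-10-31', '2004-11-01'])
+ >>> tplot(['spa_p_temp', 'spa_e_temp'])
+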
+ """
+ tvars = load(instrument='spa', trange=trange, level=level, probe=probe, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return spa_postprocessing(tvars)
+
+
+def spa_postprocessing(variables):
+ """
+ Placeholder for SPA post-processing
+ """
+ return variables
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='LANL', instrument=instrument, label=label)
diff --git a/pyspedas/lanl/config.py b/pyspedas/lanl/config.py
new file mode 100644
index 00000000..91744cb6
--- /dev/null
+++ b/pyspedas/lanl/config.py
@@ -0,0 +1,12 @@
+import os
+
+CONFIG = {'local_data_dir': 'lanl_data/',
+ 'remote_data_dir': 'https://spdf.gsfc.nasa.gov/pub/data/lanl/'}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'lanl'])
+
+if os.environ.get('LANL_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.environ['LANL_DATA_DIR']
+
\ No newline at end of file
diff --git a/pyspedas/lanl/load.py b/pyspedas/lanl/load.py
new file mode 100644
index 00000000..88fb2b9c
--- /dev/null
+++ b/pyspedas/lanl/load.py
@@ -0,0 +1,68 @@
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
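+# Map the short probe codes accepted by the wrappers to the spacecraft
+# identifiers used in the SPDF directory names (e.g., 'l1' -> '91_mpa')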
+LANL_SC = {'l0': '90',
+ 'l1': '91',
+ 'l4': '94',
+ 'l7': '97',
+ 'l9': '89',
+ 'a1': '01a',
+ 'a2': '02a'}
+
+
+def load(trange=['2004-10-31', '2004-11-01'],
+ instrument='mpa',
+ probe='a1',
+ level='k0',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the LANL mission; this function is not meant
+ to be called directly; instead, see the wrappers:
+
+ pyspedas.lanl.mpa
+ pyspedas.lanl.spa
+
+ """
+
+ probe = probe.lower()
+
+ pathformat = LANL_SC[probe]+'_'+instrument+'/%Y/'+probe+'_'+level+'_'+instrument+'_%Y%m%d_v??.cdf'
+
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange)
+
+ out_files = []
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly:
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
+
+ if notplot:
+ return tvars
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
+
+
\ No newline at end of file
diff --git a/pyspedas/lanl/tests/__init__.py b/pyspedas/lanl/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/lanl/tests/tests.py b/pyspedas/lanl/tests/tests.py
new file mode 100644
index 00000000..515d5b1c
--- /dev/null
+++ b/pyspedas/lanl/tests/tests.py
@@ -0,0 +1,34 @@
+import os
+import unittest
+from pytplot import data_exists
+import pyspedas
+
+
+class LoadTestCases(unittest.TestCase):
+ def test_load_mpa_data(self):
+ out_vars = pyspedas.lanl.mpa(time_clip=True)
+ self.assertTrue(data_exists('dens_lop'))
+ self.assertTrue(data_exists('vel_lop'))
+ self.assertTrue(data_exists('temp_lop'))
+
+ def test_load_spa_data(self):
+ out_vars = pyspedas.lanl.spa(time_clip=True)
+ self.assertTrue(data_exists('spa_p_temp'))
+ self.assertTrue(data_exists('spa_e_temp'))
+ self.assertTrue(data_exists('spa_a_flux'))
+ self.assertTrue(data_exists('spa_e_flx'))
+ self.assertTrue(data_exists('spa_p_flx'))
+
+ def test_load_notplot(self):
+ out_vars = pyspedas.lanl.spa(notplot=True)
+ self.assertTrue('spa_p_temp' in out_vars)
+
+ def test_downloadonly(self):
+ files = pyspedas.lanl.spa(downloadonly=True, trange=['2004-10-31', '2004-11-01'])
+ self.assertTrue(os.path.exists(files[0]))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+
\ No newline at end of file
diff --git a/pyspedas/maven/__init__.py b/pyspedas/maven/__init__.py
index 472a4805..1b4f7a31 100644
--- a/pyspedas/maven/__init__.py
+++ b/pyspedas/maven/__init__.py
@@ -3,6 +3,7 @@
"""
from .maven_load import load_data
+import pyspedas.maven.spdf as spdf_load
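+# Passing spdf=True to the load wrappers below routes loading through
+# pyspedas.maven.spdf, which reads the CDFs archived at SPDF.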
def maven_load(filenames=None,
@@ -102,56 +103,94 @@ def maven_load(filenames=None,
get_metadata=get_metadata, auto_yes=auto_yes, varnames=varnames)
return tvars
+
def kp(trange=['2016-01-01', '2016-01-02'], datatype=None, varformat=None, get_support_data=False,
- auto_yes=True, downloadonly=False, insitu=True, iuvs=False, varnames=[]):
+ auto_yes=True, downloadonly=False, insitu=True, iuvs=False, varnames=[], spdf=False):
+ if spdf:
+ if datatype is None:
+ datatype = 'kp-4sec'
+ return spdf_load.kp(trange=trange, datatype=datatype, varformat=varformat, get_support_data=get_support_data, downloadonly=downloadonly, varnames=varnames)
return maven_load(start_date=trange[0], end_date=trange[1], type=datatype, level='kp', varformat=varformat, varnames=varnames,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, insitu=insitu, iuvs=iuvs)
-def mag(trange=['2016-01-01', '2016-01-02'], level='l2', datatype='ss', varformat=None, get_support_data=False,
- auto_yes=True, downloadonly=False, varnames=[]):
+
+def mag(trange=['2016-01-01', '2016-01-02'], level='l2', datatype=None, varformat=None, get_support_data=False,
+ auto_yes=True, downloadonly=False, varnames=[], spdf=False):
+ if spdf:
+ if datatype is None:
+ datatype = 'sunstate-1sec'
+ return spdf_load.mag(trange=trange, level=level, datatype=datatype, varformat=varformat, get_support_data=get_support_data,
+ downloadonly=downloadonly, varnames=varnames)
+ if datatype is None:
+ datatype = 'ss'
return maven_load(instruments='mag', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
-def sta(trange=['2016-01-01', '2016-01-02'], level='l2', datatype='2a', varformat=None, get_support_data=False,
- auto_yes=True, downloadonly=False, varnames=[]):
+
+def sta(trange=['2016-01-01', '2016-01-02'], level='l2', datatype=None, varformat=None, get_support_data=False,
+ auto_yes=True, downloadonly=False, varnames=[], spdf=False):
+ if spdf:
+ if datatype is None:
+ datatype = 'c0-64e2m'
+ return spdf_load.static(trange=trange, level=level, datatype=datatype, varformat=varformat, get_support_data=get_support_data,
+ downloadonly=downloadonly, varnames=varnames)
+ if datatype is None:
+ datatype = '2a'
return maven_load(instruments='sta', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_metadata=True, get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def swea(trange=['2016-01-01', '2016-01-02'], level='l2', datatype='svyspec', varformat=None, get_support_data=False,
- auto_yes=True, downloadonly=False, varnames=[]):
+ auto_yes=True, downloadonly=False, varnames=[], spdf=False):
+ if spdf:
+ return spdf_load.swea(trange=trange, level=level, datatype=datatype, varformat=varformat, get_support_data=get_support_data,
+ downloadonly=downloadonly, varnames=varnames)
return maven_load(instruments='swe', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def swia(trange=['2016-01-01', '2016-01-02'], level='l2', datatype='onboardsvyspec', varformat=None, get_support_data=False,
- auto_yes=True, downloadonly=False, varnames=[]):
+ auto_yes=True, downloadonly=False, varnames=[], spdf=False):
+ if spdf:
+ return spdf_load.swia(trange=trange, level=level, datatype=datatype, varformat=varformat, get_support_data=get_support_data,
+ downloadonly=downloadonly, varnames=varnames)
return maven_load(instruments='swi', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def sep(trange=['2016-01-01', '2016-01-02'], level='l2', datatype='s2-cal-svy-full', varformat=None, get_support_data=False,
- auto_yes=True, downloadonly=False, varnames=[]):
+ auto_yes=True, downloadonly=False, varnames=[], spdf=False):
+ if spdf:
+ return spdf_load.sep(trange=trange, level=level, datatype=datatype, varformat=varformat, get_support_data=get_support_data,
+ downloadonly=downloadonly, varnames=varnames)
return maven_load(instruments='sep', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def rse(trange=['2016-01-01', '2016-01-02'], level='l2', datatype=None, varformat=None, get_support_data=False,
auto_yes=True, downloadonly=False, varnames=[]):
return maven_load(instruments='rse', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def lpw(trange=['2016-01-01', '2016-01-02'], level='l2', datatype='lpiv', varformat=None, get_support_data=False,
auto_yes=True, downloadonly=False, varnames=[]):
return maven_load(instruments='lpw', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def euv(trange=['2016-01-01', '2016-01-02'], level='l2', datatype='bands', varformat=None, get_support_data=False,
auto_yes=True, downloadonly=False, varnames=[]):
return maven_load(instruments='euv', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def iuv(trange=['2016-01-01', '2016-01-02'], level='l2', datatype=None, varformat=None, get_support_data=False,
auto_yes=True, downloadonly=False, varnames=[]):
return maven_load(instruments='iuv', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
get_support_data=get_support_data, auto_yes=auto_yes, download_only=downloadonly, varnames=varnames)
+
def ngi(trange=['2016-01-01', '2016-01-02'], level='l2', datatype=None, varformat=None, get_support_data=False,
auto_yes=True, downloadonly=False, varnames=[]):
return maven_load(instruments='ngi', start_date=trange[0], end_date=trange[1], type=datatype, level=level, varformat=varformat,
diff --git a/pyspedas/maven/maven_load.py b/pyspedas/maven/maven_load.py
index 0f5c6077..33425f76 100644
--- a/pyspedas/maven/maven_load.py
+++ b/pyspedas/maven/maven_load.py
@@ -1,3 +1,4 @@
+import logging
from dateutil.parser import parse
import os
@@ -25,7 +26,7 @@ def maven_filenames(filenames=None,
# Check for orbit num rather than time string
if isinstance(start_date, int) and isinstance(end_date, int):
- print("Orbit numbers specified, checking for updated orbit # file from naif.jpl.nasa.gov")
+ logging.info("Orbit numbers specified, checking for updated orbit # file from naif.jpl.nasa.gov")
get_orbit_files()
start_date, end_date = orbit_time(start_date, end_date)
start_date = parse(start_date)
@@ -91,7 +92,7 @@ def maven_filenames(filenames=None,
s = get_filenames(query, public)
if not s:
- print("No files found for {}.".format(instrument))
+ logging.error("No files found for {}.".format(instrument))
maven_files[instrument] = []
continue
@@ -112,7 +113,7 @@ def maven_filenames(filenames=None,
query = '&'.join(query_args)
s = get_filenames(query, public)
if not s:
- print("No files found for {}.".format(instrument))
+ logging.error("No files found for {}.".format(instrument))
maven_files[instrument] = []
else:
s = s.split(',')
@@ -203,7 +204,7 @@ def load_data(filenames=None,
if list_files:
for f in s:
- print(f)
+ logging.info(f)
return
if new_files:
@@ -213,8 +214,8 @@ def load_data(filenames=None,
s = get_new_files(bn_files_to_load, data_dir, instr, level)
if len(s) == 0:
continue
- print("Your request will download a total of: "+str(len(s))+" files for instrument "+str(instr))
- print('Would you like to proceed with the download? ')
+ logging.info("Your request will download a total of: "+str(len(s))+" files for instrument "+str(instr))
+ logging.info('Would you like to proceed with the download? ')
valid_response = False
cancel = False
if auto_yes:
@@ -225,11 +226,11 @@ def load_data(filenames=None,
valid_response = True
cancel = False
elif response == 'n' or response == 'N':
- print('Cancelled download. Returning...')
+ logging.error('Cancelled download. Returning...')
valid_response = True
cancel = True
else:
- print('Invalid input. Please answer with y or n.')
+ logging.error('Invalid input. Please answer with y or n.')
if cancel:
continue
diff --git a/pyspedas/maven/spdf/__init__.py b/pyspedas/maven/spdf/__init__.py
new file mode 100644
index 00000000..954434b2
--- /dev/null
+++ b/pyspedas/maven/spdf/__init__.py
@@ -0,0 +1,494 @@
+from .load import load
+
+
+def mag(trange=['2014-10-18', '2014-10-19'],
+ datatype='sunstate-1sec',
+ level='l2',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Magnetometer (MAG)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'sunstate-1sec' for L2 data
+
+ level: str
+ Data level; options: 'l1', 'l2' (default: l2)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
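+ Examples
+ ----------
+ A minimal usage sketch; setting spdf=True on the top-level wrapper
+ routes loading through this function:
+
+ >>> import pyspedas
+ >>> mag_vars = pyspedas.maven.mag(trange=['2014-10-18', '2014-10-19'], spdf=True)
+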
+ """
+ tvars = load(instrument='mag', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return mag_postprocessing(tvars)
+
+
+def mag_postprocessing(variables):
+ """
+ Placeholder for MAG post-processing
+ """
+ return variables
+
+
+def swea(trange=['2014-10-18', '2014-10-19'],
+ datatype='arc3d',
+ level='l2',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Solar Wind Electron Analyzer (SWEA)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'arc3d' for L2 data
+ 'arcpad' for L2 data
+ 'svy3d' for L2 data
+ 'svypad' for L2 data
+ 'svyspec' for L2 data
+
+ level: str
+ Data level; options: 'l2' (default: l2)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
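+ Examples
+ ----------
+ A minimal usage sketch; setting spdf=True on the top-level wrapper
+ routes loading through this function:
+
+ >>> import pyspedas
+ >>> swea_vars = pyspedas.maven.swea(trange=['2014-10-18', '2014-10-19'], spdf=True)
+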
+ """
+ tvars = load(instrument='swea', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return swea_postprocessing(tvars)
+
+
+def swea_postprocessing(variables):
+ """
+ Placeholder for SWEA post-processing
+ """
+ return variables
+
+
+def swia(trange=['2014-10-18', '2014-10-19'],
+ datatype='onboardsvyspec',
+ level='l2',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Solar Wind Ion Analyzer (SWIA)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'onboardsvyspec' for L2 data
+ 'onboardsvymom' for L2 data
+ 'finesvy3d' for L2 data
+ 'finearc3d' for L2 data
+ 'coarsesvy3d' for L2 data
+ 'coarsearc3d' for L2 data
+
+ level: str
+ Data level; options: 'l2' (default: l2)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
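+ Examples
+ ----------
+ A minimal usage sketch; setting spdf=True on the top-level wrapper
+ routes loading through this function:
+
+ >>> import pyspedas
+ >>> swia_vars = pyspedas.maven.swia(trange=['2014-10-18', '2014-10-19'], spdf=True)
+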
+ """
+ tvars = load(instrument='swia', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return swia_postprocessing(tvars)
+
+
+def swia_postprocessing(variables):
+ """
+ Placeholder for SWIA post-processing
+ """
+ return variables
+
+
+def static(trange=['2014-10-18', '2014-10-19'],
+ datatype='c0-64e2m',
+ level='l2',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the SupraThermal And Thermal Ion Composition (STATIC)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'c0-64e2m' for L2 data
+ 'c2-32e32m' for L2 data
+ 'c4-4e64m' for L2 data
+ 'c6-32e64m' for L2 data
+ 'c8-32e16d' for L2 data
+ 'ca-16e4d16a' for L2 data
+ 'cc-32e8d32m' for L2 data
+ 'cd-32e8d32m' for L2 data
+ 'ce-16e4d16a16m' for L2 data
+ 'cf-16e4d16a16m' for L2 data
+ 'd0-32e4d16a8m' for L2 data
+ 'd1-32e4d16a8m' for L2 data
+ 'd4-4d16a2m' for L2 data
+ 'd6-events' for L2 data
+ 'd7-fsthkp' for L2 data
+ 'd8-12r1e' for L2 data
+ 'd9-12r64e' for L2 data
+ 'da-1r64e' for L2 data
+ 'db-1024tof' for L2 data
+ 'hkp' for L2 data
+
+ level: str
+ Data level; options: 'l2' (default: l2)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
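+ Examples
+ ----------
+ A minimal usage sketch; setting spdf=True on the top-level
+ pyspedas.maven.sta wrapper routes loading through this function:
+
+ >>> import pyspedas
+ >>> sta_vars = pyspedas.maven.sta(trange=['2014-10-18', '2014-10-19'], spdf=True)
+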
+ """
+ tvars = load(instrument='static', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return static_postprocessing(tvars)
+
+
+def static_postprocessing(variables):
+ """
+ Placeholder for STATIC post-processing
+ """
+ return variables
+
+
+def sep(trange=['2014-10-18', '2014-10-19'],
+ datatype='s1-cal-svy-full',
+ level='l2',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Solar Energetic Particle (SEP) instrument
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 's1-cal-svy-full' for L2 data
+ 's2-cal-svy-full' for L2 data
+
+ level: str
+ Data level; options: 'l2' (default: l2)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
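+ Examples
+ ----------
+ A minimal usage sketch; setting spdf=True on the top-level wrapper
+ routes loading through this function:
+
+ >>> import pyspedas
+ >>> sep_vars = pyspedas.maven.sep(trange=['2014-10-18', '2014-10-19'], spdf=True)
+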
+ """
+ tvars = load(instrument='sep', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return sep_postprocessing(tvars)
+
+
+def sep_postprocessing(variables):
+ """
+ Placeholder for SEP post-processing
+ """
+ return variables
+
+
+def kp(trange=['2014-10-18', '2014-10-19'],
+ datatype='kp-4sec',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Key Parameters (KP)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'kp-4sec' for 4 second KP data
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
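+ Examples
+ ----------
+ A minimal usage sketch; setting spdf=True on the top-level wrapper
+ routes loading through this function:
+
+ >>> import pyspedas
+ >>> kp_vars = pyspedas.maven.kp(trange=['2014-10-18', '2014-10-19'], spdf=True)
+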
+ """
+ tvars = load(instrument='kp', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return kp_postprocessing(tvars)
+
+
+def kp_postprocessing(variables):
+ """
+ Placeholder for KP post-processing
+ """
+ return variables
diff --git a/pyspedas/maven/spdf/config.py b/pyspedas/maven/spdf/config.py
new file mode 100644
index 00000000..b4abec25
--- /dev/null
+++ b/pyspedas/maven/spdf/config.py
@@ -0,0 +1,12 @@
+import os
+
+CONFIG = {'local_data_dir': 'maven/',
+ 'remote_data_dir': 'https://spdf.gsfc.nasa.gov/pub/data/maven/'}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'maven'])
+
+if os.environ.get('MAVEN_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.environ['MAVEN_DATA_DIR']
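+
+# example (a sketch): set MAVEN_DATA_DIR before importing pyspedas to relocate
+# the MAVEN files, e.g.:
+#     export MAVEN_DATA_DIR=/path/to/maven_data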
+
\ No newline at end of file
diff --git a/pyspedas/maven/spdf/load.py b/pyspedas/maven/spdf/load.py
new file mode 100644
index 00000000..4e7b176b
--- /dev/null
+++ b/pyspedas/maven/spdf/load.py
@@ -0,0 +1,73 @@
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+
+def load(trange=['2014-10-18', '2014-10-19'],
+ instrument='mag',
+ datatype='',
+ level='l2',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the MAVEN mission; this function is not meant
+ to be called directly; instead, see the wrappers:
+
+ pyspedas.maven.mag
+ pyspedas.maven.swea
+ pyspedas.maven.swia
+ pyspedas.maven.static
+ pyspedas.maven.sep
+ pyspedas.maven.kp
+
+ """
+
+ if instrument == 'mag':
+ pathformat = f"{instrument}/{level}/{datatype}/cdfs/%Y/%m/mvn_{instrument}_{level}-{datatype}_%Y%m%d_v??_r??.cdf"
+ elif instrument == 'swea':
+ pathformat = f"{instrument}/{level}/{datatype}/%Y/%m/mvn_swe_{level}_{datatype}_%Y%m%d_v??_r??.cdf"
+ elif instrument == 'swia':
+ pathformat = f"{instrument}/{level}/{datatype}/%Y/%m/mvn_swi_{level}_{datatype}_%Y%m%d_v??_r??.cdf"
+ elif instrument == 'static':
+ pathformat = f"{instrument}/{level}/{datatype}/%Y/%m/mvn_sta_{level}_{datatype}_%Y%m%d_v??_r??.cdf"
+ elif instrument == 'sep':
+ pathformat = f"{instrument}/{level}/{datatype}/%Y/%m/mvn_{instrument}_{level}_{datatype}_%Y%m%d_v??_r??.cdf"
+ elif instrument == 'kp':
+ # https://spdf.gsfc.nasa.gov/pub/data/maven/insitu/kp-4sec/cdfs/2016/
+ pathformat = f"insitu/{datatype}/cdfs/%Y/%m/mvn_insitu_{datatype}_%Y%m%d_v??_r??.cdf"
+
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange)
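+ # e.g., for the default trange this yields one name per day, with the %Y/%m/%Y%m%d
+ # tokens filled in; the v??_r?? version wildcards are resolved by the download step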
+
+ out_files = []
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly:
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat,
+ varnames=varnames, notplot=notplot)
+
+ if notplot:
+ return tvars
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
diff --git a/pyspedas/maven/tests/tests.py b/pyspedas/maven/tests/tests.py
index 0b41cd7f..0c2061ee 100644
--- a/pyspedas/maven/tests/tests.py
+++ b/pyspedas/maven/tests/tests.py
@@ -1,15 +1,17 @@
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
from pyspedas import maven
+from pyspedas.maven.download_files_utilities import get_orbit_files, merge_orbit_files
+
+
+class OrbitTestCases(unittest.TestCase):
+ def test_get_merge_orbit_files(self):
+ get_orbit_files()
+ merge_orbit_files()
+ self.assertTrue(os.path.exists(os.path.join(os.path.dirname(__file__), '..', 'maven_orb_rec.orb')))
-# class OrbitTestCases(unittest.TestCase):
-# def test_get_merge_orbit_files(self):
-# from pyspedas.maven.download_files_utilities import get_orbit_files, merge_orbit_files
-# get_orbit_files()
-# merge_orbit_files()
-# self.assertTrue(os.path.join(os.path.join(os.path.dirname(__file__), '..'), 'maven_orb_rec.orb'))
class LoadTestCases(unittest.TestCase):
def test_load_kp_data(self):
@@ -17,7 +19,7 @@ def test_load_kp_data(self):
self.assertTrue(data_exists('mvn_kp::spacecraft::geo_x'))
def test_load_mag_data(self):
- data = maven.mag()
+ data = maven.mag(datatype='ss1s')
self.assertTrue(data_exists('OB_B'))
def test_load_sta_data(self):
diff --git a/pyspedas/maven/utilities.py b/pyspedas/maven/utilities.py
index 35121eaf..9b08dbd1 100644
--- a/pyspedas/maven/utilities.py
+++ b/pyspedas/maven/utilities.py
@@ -3,6 +3,7 @@
# This software was developed at the University of Colorado's Laboratory for Atmospheric and Space Physics.
# Verify current version before use at: https://github.com/MAVENSDC/Pydivide
+import logging
import re
import os
from . import download_files_utilities as utils
@@ -36,10 +37,10 @@ def param_list(kp):
param_list_.append("#%3d %s" % (index, base_tag))
index += 1
else:
- print('*****WARNING*****')
- print('Returning INCOMPLETE Parameter List')
- print('Base tag neither DataFrame nor Series')
- print('Plese check read_insitu_file definition')
+ logging.warning('*****WARNING*****')
+ logging.warning('Returning INCOMPLETE Parameter List')
+ logging.warning('Base tag neither DataFrame nor Series')
+ logging.warning('Please check read_insitu_file definition')
return param_list_
diff --git a/pyspedas/mica/load.py b/pyspedas/mica/load.py
index 85b5d35c..e6a76c98 100644
--- a/pyspedas/mica/load.py
+++ b/pyspedas/mica/load.py
@@ -1,7 +1,7 @@
-
+import logging
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
@@ -24,9 +24,9 @@ def load(trange=['2019-02-01','2019-02-02'],
"""
if site is None:
- print('A valid MICA site code name must be entered.')
- print('Current site codes include: ')
- print('NAL, LYR, LOR, ISR, SDY, IQA, SNK, MCM, SPA, JBS, NEV, HAL, PG2[3,4,5]')
+ logging.error('A valid MICA site code name must be entered.')
+ logging.error('Current site codes include: ')
+ logging.error('NAL, LYR, LOR, ISR, SDY, IQA, SNK, MCM, SPA, JBS, NEV, HAL, PG2[3,4,5]')
return
pathformat = site.upper()+'/%Y/%m/mica_ulf_'+site.lower()+'_%Y%m%d_v??.cdf'
diff --git a/pyspedas/mica/tests/tests.py b/pyspedas/mica/tests/tests.py
index 7bcf54f5..4b80a37e 100644
--- a/pyspedas/mica/tests/tests.py
+++ b/pyspedas/mica/tests/tests.py
@@ -1,15 +1,18 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_load_none_data(self):
induction_vars = pyspedas.mica.induction()
self.assertTrue(induction_vars == None)
+ def test_load_notplot(self):
+ induction_vars = pyspedas.mica.induction(site='nal', notplot=True)
+ self.assertTrue('spectra_x_1Hz_NAL' in induction_vars)
+
def test_load_NAL_data(self):
induction_vars = pyspedas.mica.induction(site='nal', time_clip=True)
self.assertTrue(data_exists('spectra_x_1Hz_NAL'))
@@ -18,5 +21,6 @@ def test_downloadonly(self):
files = pyspedas.mica.induction(site='nal', downloadonly=True, trange=['2014-2-15', '2014-2-16'])
self.assertTrue(os.path.exists(files[0]))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/mms/aspoc/aspoc.py b/pyspedas/mms/aspoc/aspoc.py
index 123cfa83..dbd795ad 100644
--- a/pyspedas/mms/aspoc/aspoc.py
+++ b/pyspedas/mms/aspoc/aspoc.py
@@ -1,18 +1,19 @@
from pyspedas.mms.mms_load_data import mms_load_data
from pyspedas.mms.print_vars import print_vars
+
@print_vars
def mms_load_aspoc(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy',
level='l2', datatype='', varformat=None, varnames=[], get_support_data=False, suffix='', time_clip=False, no_update=False,
available=False, notplot=False, latest_version=False, major_version=False, min_version=None, cdf_version=None,
spdf=False, always_prompt=False):
"""
- This function loads ASPOC data into tplot variables
+ Load data from the Active Spacecraft Potential Control (ASPOC)
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -52,11 +53,11 @@ def mms_load_aspoc(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='sr
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -76,12 +77,13 @@ def mms_load_aspoc(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='sr
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ --------
List of tplot variables created.
"""
diff --git a/pyspedas/mms/cotrans/mms_cotrans_lmn.py b/pyspedas/mms/cotrans/mms_cotrans_lmn.py
index 4dbef0d1..a62d4eae 100644
--- a/pyspedas/mms/cotrans/mms_cotrans_lmn.py
+++ b/pyspedas/mms/cotrans/mms_cotrans_lmn.py
@@ -1,8 +1,10 @@
+"""
+This function transforms MMS vector fields from GSM coordinates to LMN (boundary-normal)
+coordinates using the Shue et al., 1998 magnetopause model. The input and output tplot
+variables are specified by name_in and name_out, respectively. Additional optional
+parameters include specifying the input data coordinates (GSM or GSE), probe, and data
+rate. The function returns the name of the output variable containing the data in LMN
+coordinates.
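+
+A usage sketch (the variable name is an assumption, a typical FGM B-field variable in GSM):
+
+    mms_cotrans_lmn('mms1_fgm_b_gsm_srvy_l2_bvec', 'mms1_fgm_b_lmn', gsm=True, probe='1')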
+"""
import numpy as np
import logging
-from pytplot import get_data, store_data, options
-from pyspedas.cotrans.cotrans_get_coord import cotrans_get_coord
+from pytplot import get_data, store_data, options, get_coords
from pyspedas.cotrans.cotrans import cotrans
from pyspedas.cotrans.gsm2lmn import gsm2lmn
from pyspedas.mms import mec
@@ -11,8 +13,9 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_cotrans_lmn(name_in, name_out, gsm=False, gse=False, probe=None, data_rate='srvy'):
- '''
+ """
Transforms MMS vector fields from GSM coordinates to LMN (boundary-normal) coordinates
using the Shue et al., 1998 magnetopause model
@@ -44,7 +47,7 @@ def mms_cotrans_lmn(name_in, name_out, gsm=False, gse=False, probe=None, data_ra
--------
Name of the variable containing the data in LMN coordinates.
- '''
+ """
data_in = get_data(name_in)
metadata_in = get_data(name_in, metadata=True)
@@ -53,7 +56,7 @@ def mms_cotrans_lmn(name_in, name_out, gsm=False, gse=False, probe=None, data_ra
logging.error('Error reading tplot variable: ' + name_in)
return None
- data_in_coord = cotrans_get_coord(name_in).lower()
+ data_in_coord = get_coords(name_in).lower()
if data_in_coord != 'gse' and data_in_coord != 'gsm' and not gse and not gsm:
logging.error('Please specify the coordinate system of the input data.')
@@ -96,7 +99,28 @@ def mms_cotrans_lmn(name_in, name_out, gsm=False, gse=False, probe=None, data_ra
else:
logging.error('Problem creating tplot variable.')
+
def solarwind_load(trange, level='hro2', min5=False):
+ """
+ Loads solar wind data for use in the GSM to LMN transformation.
+
+ Parameters
+ ----------
+ trange: list of float
+ Time range of data to be loaded
+
+ level: str
+ Data level (default: hro2)
+
+ min5: bool
+ If True, load 5-minute data; otherwise load 1-minute data (default: 1-minute)
+
+ Returns
+ -------
+ Numpy array of solar wind data with shape (N, 3), where N is the number of time points
+ and the columns are the time, Bz GSM, and pressure.
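+
+ Example (a sketch, using Unix times for 2015-10-16 to 2015-10-17):
+
+ >>> sw_data = solarwind_load([1444953600.0, 1445040000.0])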
+ """
+
if min5:
datatype = '5min'
else:
diff --git a/pyspedas/mms/cotrans/mms_cotrans_qrotate.py b/pyspedas/mms/cotrans/mms_cotrans_qrotate.py
index 05e1bbfa..e4f7ce97 100644
--- a/pyspedas/mms/cotrans/mms_cotrans_qrotate.py
+++ b/pyspedas/mms/cotrans/mms_cotrans_qrotate.py
@@ -1,11 +1,19 @@
-from pytplot import get_data, store_data
-from pyspedas import cotrans_set_coord, tinterpol
+"""
+This module provides functions for transforming MMS vector fields from one coordinate system to another using quaternion rotation.
+
+To use this module, you will need to install the SpacePy package: pip install spacepy.
+
+The main function of this module is mms_cotrans_qrotate, which performs a quaternion
+rotation on a PyTplot variable. The function takes in the names of the input and
+quaternion tplot variables, the name of the output tplot variable, and the coordinate
+system for the output data. An optional inverse flag allows the user to use the
+quaternion conjugate on the quaternion data prior to rotating. If the data and
+quaternion tplot variables are not the same length, the function will interpolate the
+data to the quaternion timestamps.
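+
+A usage sketch (assumes the MEC data are already loaded; rotates a GSE vector into ECI
+using the conjugate of the eci_to_gse quaternion):
+
+    mms_cotrans_qrotate('mms1_mec_v_gse', 'mms1_mec_quat_eci_to_gse', 'mms1_mec_v_eci', 'eci', inverse=True)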
+"""
+import logging
+from pytplot import get_data, store_data, set_coords
+from pyspedas import tinterpol
try:
import spacepy.coordinates as coord
except ImportError:
- print('SpacePy must be installed to use this module.')
- print('Please install it using: pip install spacepy')
+ logging.error('SpacePy must be installed to use this module.')
+ logging.error('Please install it using: pip install spacepy')
def mms_cotrans_qrotate(in_name, q_name, out_name, out_coord, inverse=False):
@@ -34,8 +42,16 @@ def mms_cotrans_qrotate(in_name, q_name, out_name, out_coord, inverse=False):
q_data = get_data(q_name)
+ if data is None:
+ logging.error(f'Problem reading input tplot variable: {in_name}')
+ return
+
+ if q_data is None:
+ logging.error(f'Problem reading quaternion variable: {q_name}')
+ return
+
if len(data.times) != len(q_data.times):
- print('Interpolating the data to the MEC quaternion time stamps.')
+ logging.info('Interpolating the data to the MEC quaternion time stamps.')
tinterpol(in_name, q_name)
data = get_data(in_name + '-itrp')
@@ -48,5 +64,5 @@ def mms_cotrans_qrotate(in_name, q_name, out_name, out_coord, inverse=False):
saved = store_data(out_name, data={'x': data.times, 'y': out_data}, attr_dict=metadata)
if saved:
- cotrans_set_coord(out_name, out_coord)
+ set_coords(out_name, out_coord)
diff --git a/pyspedas/mms/cotrans/mms_cotrans_qtransformer.py b/pyspedas/mms/cotrans/mms_cotrans_qtransformer.py
index a200edb9..6f202e3c 100644
--- a/pyspedas/mms/cotrans/mms_cotrans_qtransformer.py
+++ b/pyspedas/mms/cotrans/mms_cotrans_qtransformer.py
@@ -1,4 +1,18 @@
-from pytplot import tplot_copy
+"""
+This module contains the function mms_cotrans_qtransformer, which performs a quaternion
+rotation on a tplot variable from one coordinate system to another. It does this by
+recursively rotating through the ECI coordinate system.
+
+The mms_cotrans_qtransformer function takes in four required arguments:
+
+in_name: the name of the tplot variable to be transformed
+out_name: the name of the tplot variable to be created with the transformed data
+in_coord: the coordinate system of the input data
+out_coord: the coordinate system to rotate to
+It also takes an optional argument:
+
+probe: the spacecraft probe number (default is '1')
+
+The function returns the name of the output tplot variable.
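+
+For example (a sketch with hypothetical variable names), a GSE-to-GSM transform is
+performed as GSE -> ECI -> GSM:
+
+    mms_cotrans_qtransformer('mms1_b_gse', 'mms1_b_gsm', 'gse', 'gsm', probe='1')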
+"""
+from pytplot import tplot_copy, get
from .mms_cotrans_qrotate import mms_cotrans_qrotate
@@ -36,11 +50,17 @@ def mms_cotrans_qtransformer(in_name, out_name, in_coord, out_coord, probe='1'):
if in_coord == 'eci':
q_name = 'mms' + probe + '_mec_quat_eci_to_' + out_coord
+ q_data = get(q_name)
+ if q_data is None:
+ return
mms_cotrans_qrotate(in_name, q_name, out_name, out_coord)
recursive_in_coord = out_coord
else:
q_name = 'mms' + probe + '_mec_quat_eci_to_' + in_coord
+ q_data = get(q_name)
+ if q_data is None:
+ return
mms_cotrans_qrotate(in_name, q_name, out_name, out_coord, inverse=True)
recursive_in_coord = 'eci'
- return mms_cotrans_qtransformer(out_name, out_name, recursive_in_coord, out_coord, probe=probe)
\ No newline at end of file
+ return mms_cotrans_qtransformer(out_name, out_name, recursive_in_coord, out_coord, probe=probe)
diff --git a/pyspedas/mms/cotrans/mms_qcotrans.py b/pyspedas/mms/cotrans/mms_qcotrans.py
index b02ee0b0..1064670f 100644
--- a/pyspedas/mms/cotrans/mms_qcotrans.py
+++ b/pyspedas/mms/cotrans/mms_qcotrans.py
@@ -1,12 +1,27 @@
-from pyspedas import cotrans_get_coord
-from pytplot import get_data
+"""
+Performs coordinate transformations for MMS data using MMS MEC quaternions.
+
+This function uses the mms_cotrans_qtransformer function to recursively transform the
+input data from the input coordinate system to the output coordinate system by going
+through ECI. The transformation operations are performed using the SpacePy library.
+
+Parameters:
+in_name (str or list of str): Names of Tplot variables of vectors to be transformed.
+out_name (str or list of str): Names of output variables.
+in_coord (str): Input coordinate system (e.g. 'bcs', 'gse', 'gse2000', 'gsm', 'sm', 'geo', 'eci').
+out_coord (str): Output coordinate system (e.g. 'bcs', 'gse', 'gse2000', 'gsm', 'sm', 'geo', 'eci').
+probe (str): MMS spacecraft # (must be '1', '2', '3', or '4').
+
+Returns:
+list of str: List of variables created.
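+
+Example (a sketch; assumes the MEC data, including the quaternions, are already loaded):
+
+>>> mms_qcotrans('mms1_mec_v_sm', 'mms1_mec_v_sm_2gsm', out_coord='gsm')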
+"""
+import logging
+from pytplot import get_data, get_coords
from .mms_cotrans_qtransformer import mms_cotrans_qtransformer
try:
import spacepy.coordinates as coord
except ImportError:
- print('SpacePy must be installed to use this module.')
- print('Please install it using: pip install spacepy')
+ logging.error('SpacePy must be installed to use this module.')
+ logging.error('Please install it using: pip install spacepy')
def mms_qcotrans(in_name=None, out_name=None, in_coord=None, out_coord=None, probe=None):
@@ -41,11 +56,11 @@ def mms_qcotrans(in_name=None, out_name=None, in_coord=None, out_coord=None, pro
valid_coords = ['bcs', 'dbcs', 'dmpa', 'smpa', 'dsl', 'ssl', 'gse', 'gse2000', 'gsm', 'sm', 'geo', 'eci', 'j2000']
if in_name is None:
- print('Input variable name is missing')
+ logging.error('Input variable name is missing')
return
if out_name is None:
- print('Output variable name is missing')
+ logging.error('Output variable name is missing')
return
if not isinstance(in_name, list):
@@ -58,10 +73,10 @@ def mms_qcotrans(in_name=None, out_name=None, in_coord=None, out_coord=None, pro
for idx, variable in enumerate(in_name):
if in_coord is None:
- var_coords = cotrans_get_coord(variable)
+ var_coords = get_coords(variable)
in_coord = var_coords
if var_coords is None:
- print('Could not determine coordinate system for: ' + variable)
+ logging.error('Could not determine coordinate system for: ' + variable)
continue
if isinstance(in_coord, list):
@@ -72,7 +87,7 @@ def mms_qcotrans(in_name=None, out_name=None, in_coord=None, out_coord=None, pro
var_coords = var_coords.lower()
if var_coords not in valid_coords:
- print('Unsupported input coordinate system: ' + var_coords)
+ logging.error('Unsupported input coordinate system: ' + var_coords)
continue
if isinstance(out_coord, list):
@@ -80,19 +95,23 @@ def mms_qcotrans(in_name=None, out_name=None, in_coord=None, out_coord=None, pro
else:
new_coords = out_coord
+ if new_coords is None:
+ logging.error('Output coordinate system is missing.')
+ return
+
new_coords = new_coords.lower()
if new_coords not in valid_coords:
- print('Unsupported output coordinate system: ' + new_coords)
+ logging.error('Unsupported output coordinate system: ' + new_coords)
if var_coords in ['bcs', 'ssl'] or new_coords in ['bcs', 'ssl']:
- print('WARNING: there are issues transforming data to/from a spinning coordinate system')
+ logging.warning('There are issues transforming data to/from a spinning coordinate system')
# find the probe, if it's not specified by the user
if probe is None:
name_pieces = variable.split('_')
if len(name_pieces) <= 1:
- print('Probe could not be determined from: ' + variable + '; defaulting to probe 1')
+ logging.warning('Probe could not be determined from: ' + variable + '; defaulting to probe 1')
probe = '1'
else:
probe = name_pieces[0][-1]
@@ -101,11 +120,15 @@ def mms_qcotrans(in_name=None, out_name=None, in_coord=None, out_coord=None, pro
probe = str(probe)
if probe not in valid_probes:
- print('Unknown probe for variable: ' + variable + '; continuing without transforming...')
+ logging.error('Unknown probe for variable: ' + variable + '; continuing without transforming...')
continue
transformed = mms_cotrans_qtransformer(variable, out_name[idx], var_coords, new_coords, probe=probe)
+ if transformed is None:
+ logging.error('Problem occurred during the transformation; ensure that the MEC quaternions are loaded and try again.')
+ return
+
if transformed is not None:
out_vars.append(transformed)
diff --git a/pyspedas/mms/dsp/dsp.py b/pyspedas/mms/dsp/dsp.py
index 39554df0..cbd0ba6b 100644
--- a/pyspedas/mms/dsp/dsp.py
+++ b/pyspedas/mms/dsp/dsp.py
@@ -3,18 +3,19 @@
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
+
@print_vars
def mms_load_dsp(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy',
level='l2', datatype='bpsd', varformat=None, varnames=[], suffix='', get_support_data=False,
time_clip=False, no_update=False, available=False, notplot=False, latest_version=False,
major_version=False, min_version=None, cdf_version=None, spdf=False, always_prompt=False):
"""
- This function loads DSP data into tplot variables
+ Load data from the Digital Signal Processing (DSP) board.
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -54,11 +55,11 @@ def mms_load_dsp(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -78,12 +79,13 @@ def mms_load_dsp(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ --------
List of tplot variables created.
"""
@@ -97,4 +99,4 @@ def mms_load_dsp(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
mms_dsp_set_metadata(probe, data_rate, level, suffix=suffix)
- return tvars
\ No newline at end of file
+ return tvars
diff --git a/pyspedas/mms/dsp/mms_dsp_set_metadata.py b/pyspedas/mms/dsp/mms_dsp_set_metadata.py
index 4f542eaa..bb1cc6fd 100644
--- a/pyspedas/mms/dsp/mms_dsp_set_metadata.py
+++ b/pyspedas/mms/dsp/mms_dsp_set_metadata.py
@@ -1,6 +1,7 @@
from pytplot import options
from pyspedas import tnames
+
def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
"""
This function updates the metadata for DSP data products
@@ -21,9 +22,12 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
no suffix is added.
"""
- if not isinstance(probe, list): probe = [probe]
- if not isinstance(data_rate, list): data_rate = [data_rate]
- if not isinstance(level, list): level = [level]
+ if not isinstance(probe, list):
+ probe = [probe]
+ if not isinstance(data_rate, list):
+ data_rate = [data_rate]
+ if not isinstance(level, list):
+ level = [level]
instrument = 'dsp'
@@ -39,7 +43,6 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm1_'+this_dr+'_'+this_lvl+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm1_'+this_dr+'_'+this_lvl+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm1_'+this_dr+'_'+this_lvl+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm1_'+this_dr+'_'+this_lvl+suffix, 'Colormap', 'spedas')
if 'mms'+str(this_probe)+'_'+instrument+'_bpsd_scm2_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm2_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' DSP BPSD SCM2')
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm2_'+this_dr+'_'+this_lvl+suffix, 'ysubtitle', '[Hz]')
@@ -47,7 +50,6 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm2_'+this_dr+'_'+this_lvl+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm2_'+this_dr+'_'+this_lvl+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm2_'+this_dr+'_'+this_lvl+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm2_'+this_dr+'_'+this_lvl+suffix, 'Colormap', 'spedas')
if 'mms'+str(this_probe)+'_'+instrument+'_bpsd_scm3_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm3_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' DSP BPSD SCM3')
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm3_'+this_dr+'_'+this_lvl+suffix, 'ysubtitle', '[Hz]')
@@ -55,7 +57,6 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm3_'+this_dr+'_'+this_lvl+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm3_'+this_dr+'_'+this_lvl+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm3_'+this_dr+'_'+this_lvl+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_bpsd_scm3_'+this_dr+'_'+this_lvl+suffix, 'Colormap', 'spedas')
if 'mms'+str(this_probe)+'_'+instrument+'_bpsd_omni_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_omni_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' DSP BPSD')
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_omni_'+this_dr+'_'+this_lvl+suffix, 'ysubtitle', '[Hz]')
@@ -63,7 +64,6 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_omni_'+this_dr+'_'+this_lvl+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_omni_'+this_dr+'_'+this_lvl+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_bpsd_omni_'+this_dr+'_'+this_lvl+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_bpsd_omni_'+this_dr+'_'+this_lvl+suffix, 'Colormap', 'spedas')
if 'mms'+str(this_probe)+'_'+instrument+'_epsd_omni'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_epsd_omni'+suffix, 'ytitle', 'MMS'+str(this_probe)+' DSP EPSD')
options('mms'+str(this_probe)+'_'+instrument+'_epsd_omni'+suffix, 'ysubtitle', '[Hz]')
@@ -71,7 +71,6 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_epsd_omni'+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_omni'+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_omni'+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_epsd_omni'+suffix, 'Colormap', 'spedas')
if 'mms'+str(this_probe)+'_'+instrument+'_epsd_x'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_epsd_x'+suffix, 'ytitle', 'MMS'+str(this_probe)+' DSP EPSD-X')
options('mms'+str(this_probe)+'_'+instrument+'_epsd_x'+suffix, 'ysubtitle', '[Hz]')
@@ -79,7 +78,6 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_epsd_x'+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_x'+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_x'+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_epsd_x'+suffix, 'Colormap', 'spedas')
if 'mms'+str(this_probe)+'_'+instrument+'_epsd_y'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_epsd_y'+suffix, 'ytitle', 'MMS'+str(this_probe)+' DSP EPSD-Y')
options('mms'+str(this_probe)+'_'+instrument+'_epsd_y'+suffix, 'ysubtitle', '[Hz]')
@@ -87,7 +85,6 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_epsd_y'+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_y'+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_y'+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_epsd_y'+suffix, 'Colormap', 'spedas')
if 'mms'+str(this_probe)+'_'+instrument+'_epsd_z'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_epsd_z'+suffix, 'ytitle', 'MMS'+str(this_probe)+' DSP EPSD-Z')
options('mms'+str(this_probe)+'_'+instrument+'_epsd_z'+suffix, 'ysubtitle', '[Hz]')
@@ -95,4 +92,3 @@ def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_epsd_z'+suffix, 'spec', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_z'+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_epsd_z'+suffix, 'zlog', True)
- options('mms'+str(this_probe)+'_'+instrument+'_epsd_z'+suffix, 'Colormap', 'spedas')
\ No newline at end of file
diff --git a/pyspedas/mms/edi/edi.py b/pyspedas/mms/edi/edi.py
index cdca2803..4a649d48 100644
--- a/pyspedas/mms/edi/edi.py
+++ b/pyspedas/mms/edi/edi.py
@@ -3,18 +3,19 @@
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
+
@print_vars
def mms_load_edi(trange=['2016-10-16', '2016-10-17'], probe='1', data_rate='srvy', level='l2', datatype='efield',
varformat=None, varnames=[], get_support_data=False, suffix='', time_clip=False, no_update=False,
available=False, notplot=False, latest_version=False, major_version=False, min_version=None, cdf_version=None,
spdf=False, always_prompt=False):
"""
- This function loads EDI data into tplot variables
+ Load data from the Electron Drift Instrument (EDI)
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -54,11 +55,11 @@ def mms_load_edi(trange=['2016-10-16', '2016-10-17'], probe='1', data_rate='srvy
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -78,12 +79,13 @@ def mms_load_edi(trange=['2016-10-16', '2016-10-17'], probe='1', data_rate='srvy
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ --------
List of tplot variables created.
"""
diff --git a/pyspedas/mms/edi/mms_edi_set_metadata.py b/pyspedas/mms/edi/mms_edi_set_metadata.py
index eeb7bdd0..57d8d76c 100644
--- a/pyspedas/mms/edi/mms_edi_set_metadata.py
+++ b/pyspedas/mms/edi/mms_edi_set_metadata.py
@@ -1,6 +1,7 @@
from pytplot import options
from pyspedas import tnames
+
def mms_edi_set_metadata(probe, data_rate, level, suffix=''):
"""
This function updates the metadata for EDI data products
@@ -34,25 +35,19 @@ def mms_edi_set_metadata(probe, data_rate, level, suffix=''):
for this_lvl in level:
if 'mms'+str(this_probe)+'_'+instrument+'_vdrift_dsl_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_vdrift_dsl_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDI drift velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_vdrift_dsl_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_vdrift_dsl_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Vx DSL', 'Vy DSL', 'Vz DSL'])
if 'mms'+str(this_probe)+'_'+instrument+'_vdrift_gse_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_vdrift_gse_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDI drift velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_vdrift_gse_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_vdrift_gse_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Vx GSE', 'Vy GSE', 'Vz GSE'])
if 'mms'+str(this_probe)+'_'+instrument+'_vdrift_gsm_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_vdrift_gsm_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDI drift velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_vdrift_gsm_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_vdrift_gsm_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Vx GSM', 'Vy GSM', 'Vz GSM'])
if 'mms'+str(this_probe)+'_'+instrument+'_e_dsl_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_e_dsl_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDI e-field')
- options('mms'+str(this_probe)+'_'+instrument+'_e_dsl_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_e_dsl_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Ex DSL', 'Ey DSL', 'Ez DSL'])
if 'mms'+str(this_probe)+'_'+instrument+'_e_gse_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_e_gse_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDI e-field')
- options('mms'+str(this_probe)+'_'+instrument+'_e_gse_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_e_gse_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Ex GSE', 'Ey GSE', 'Ez GSE'])
if 'mms'+str(this_probe)+'_'+instrument+'_e_gsm_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_e_gsm_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDI e-field')
- options('mms'+str(this_probe)+'_'+instrument+'_e_gsm_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
- options('mms'+str(this_probe)+'_'+instrument+'_e_gsm_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Ex GSM', 'Ey GSM', 'Ez GSM'])
\ No newline at end of file
+ options('mms'+str(this_probe)+'_'+instrument+'_e_gsm_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Ex GSM', 'Ey GSM', 'Ez GSM'])
diff --git a/pyspedas/mms/edp/edp.py b/pyspedas/mms/edp/edp.py
index 3191d8e1..0c2b322c 100644
--- a/pyspedas/mms/edp/edp.py
+++ b/pyspedas/mms/edp/edp.py
@@ -3,18 +3,19 @@
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
+
@print_vars
def mms_load_edp(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast', level='l2', datatype='dce',
varformat=None, varnames=[], get_support_data=False, suffix='', time_clip=True, no_update=False,
available=False, notplot=False, latest_version=False, major_version=False, min_version=None, cdf_version=None,
spdf=False, always_prompt=False):
"""
- This function loads EDP data into tplot variables
+ Load data from the Electric field Double Probes (EDP) instrument
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -54,11 +55,11 @@ def mms_load_edp(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -78,15 +79,23 @@ def mms_load_edp(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ --------
List of tplot variables created.
"""
+
+ # as of 20 June 2023, there's a mixture of v2.x.x and v3.x.x files at the SDC
+ # these files aren't compatible, so we need to only load the latest major version
+ # to avoid crashes (unless otherwise specified)
+ if not latest_version and not major_version and min_version is None and cdf_version is None:
+ major_version = True
+
tvars = mms_load_data(trange=trange, notplot=notplot, probe=probe, data_rate=data_rate, level=level, instrument='edp',
datatype=datatype, varformat=varformat, varnames=varnames, get_support_data=get_support_data, suffix=suffix,
time_clip=time_clip, no_update=no_update, available=available, latest_version=latest_version,
diff --git a/pyspedas/mms/edp/mms_edp_set_metadata.py b/pyspedas/mms/edp/mms_edp_set_metadata.py
index 9411cff9..a43644d1 100644
--- a/pyspedas/mms/edp/mms_edp_set_metadata.py
+++ b/pyspedas/mms/edp/mms_edp_set_metadata.py
@@ -1,6 +1,7 @@
from pytplot import options
from pyspedas import tnames
+
def mms_edp_set_metadata(probe, data_rate, level, suffix=''):
"""
This function updates the metadata for EDP data products
@@ -34,11 +35,9 @@ def mms_edp_set_metadata(probe, data_rate, level, suffix=''):
for this_lvl in level:
if 'mms'+str(this_probe)+'_'+instrument+'_dce_gse_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_dce_gse_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDP DCE')
- options('mms'+str(this_probe)+'_'+instrument+'_dce_gse_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_dce_gse_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Ex GSE', 'Ey GSE', 'Ez GSE'])
if 'mms'+str(this_probe)+'_'+instrument+'_dce_dsl_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_dce_dsl_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDP DCE')
- options('mms'+str(this_probe)+'_'+instrument+'_dce_dsl_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_dce_dsl_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Ex DSL', 'Ey DSL', 'Ez DSL'])
if 'mms'+str(this_probe)+'_'+instrument+'_hfesp_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_hfesp_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' EDP HFesp')
@@ -46,4 +45,3 @@ def mms_edp_set_metadata(probe, data_rate, level, suffix=''):
options('mms'+str(this_probe)+'_'+instrument+'_hfesp_'+this_dr+'_'+this_lvl+suffix, 'ylog', True)
options('mms'+str(this_probe)+'_'+instrument+'_hfesp_'+this_dr+'_'+this_lvl+suffix, 'zlog', True)
options('mms'+str(this_probe)+'_'+instrument+'_hfesp_'+this_dr+'_'+this_lvl+suffix, 'spec', True)
- options('mms'+str(this_probe)+'_'+instrument+'_hfesp_'+this_dr+'_'+this_lvl+suffix, 'Colormap', 'jet')
\ No newline at end of file
diff --git a/pyspedas/mms/eis/eis.py b/pyspedas/mms/eis/eis.py
index dd2f0c43..8bc7bcd2 100644
--- a/pyspedas/mms/eis/eis.py
+++ b/pyspedas/mms/eis/eis.py
@@ -4,21 +4,21 @@
from pyspedas.mms.eis.mms_eis_set_metadata import mms_eis_set_metadata
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
-
from pyspedas import tnames
+
@print_vars
def mms_load_eis(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy', level='l2', datatype='extof',
varformat=None, varnames=[], get_support_data=True, suffix='', time_clip=False, no_update=False,
available=False, notplot=False, latest_version=False, major_version=False, min_version=None, cdf_version=None,
spdf=False, always_prompt=False):
"""
- This function loads EIS data into tplot variables
+ Load data from the Energetic Ion Spectrometer (EIS)
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -58,11 +58,11 @@ def mms_load_eis(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -82,12 +82,13 @@ def mms_load_eis(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ ----------
List of tplot variables created.
"""
@@ -97,7 +98,7 @@ def mms_load_eis(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
time_clip=time_clip, no_update=no_update, available=available, latest_version=latest_version,
major_version=major_version, min_version=min_version, cdf_version=cdf_version, spdf=spdf, always_prompt=always_prompt)
- if tvars == [] or available or notplot or CONFIG['download_only']:
+ if tvars is None or tvars == [] or available or notplot or CONFIG['download_only']:
return tvars
if not isinstance(probe, list): probe = [probe]
diff --git a/pyspedas/mms/eis/mms_eis_omni.py b/pyspedas/mms/eis/mms_eis_omni.py
index c55051f2..c0e038bb 100644
--- a/pyspedas/mms/eis/mms_eis_omni.py
+++ b/pyspedas/mms/eis/mms_eis_omni.py
@@ -1,15 +1,14 @@
-
import logging
import numpy as np
-from pytplot import get_data, store_data, options
-from ...utilities.tnames import tnames
+from pytplot import get_data, store_data, options, tnames
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_eis_omni(probe, species='proton', datatype='extof', suffix='', data_units='flux', data_rate='srvy', level='l2'):
"""
- This function will calculate the omni-directional EIS spectrograms, and is automatically called from mms_load_eis
+ This function will calculate the omnidirectional EIS spectrograms, and is automatically called from mms_load_eis
Parameters
----------
@@ -34,8 +33,8 @@ def mms_eis_omni(probe, species='proton', datatype='extof', suffix='', data_unit
level: str
data level ['l1a','l1b','l2pre','l2' (default)]
-
- Returns:
+ Returns
+ ---------
Name of tplot variable created.
"""
@@ -74,7 +73,6 @@ def mms_eis_omni(probe, species='proton', datatype='extof', suffix='', data_unit
options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'ytitle', 'MMS' + probe + ' ' + datatype + ' ' + species)
options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'ysubtitle', 'Energy [keV]')
options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'yrange', [14, 45])
- options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'Colormap', 'spedas')
# create new variable with omni energy limits
energy_minus = get_data(prefix + species_str + '_t0_energy_dminus' + suffix)
diff --git a/pyspedas/mms/eis/mms_eis_pad.py b/pyspedas/mms/eis/mms_eis_pad.py
index a17df380..a82810a4 100644
--- a/pyspedas/mms/eis/mms_eis_pad.py
+++ b/pyspedas/mms/eis/mms_eis_pad.py
@@ -16,6 +16,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_eis_pad(scopes=['0', '1', '2', '3', '4', '5'], probe='1', level='l2',
data_rate='srvy', datatype='extof', species='proton', data_units='flux',
energy=[55, 800], size_pabin=15, suffix=''):
@@ -54,7 +55,8 @@ def mms_eis_pad(scopes=['0', '1', '2', '3', '4', '5'], probe='1', level='l2',
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ ----------
Name of tplot variables created.
"""
@@ -86,7 +88,6 @@ def mms_eis_pad(scopes=['0', '1', '2', '3', '4', '5'], probe='1', level='l2',
# the probes will need to be strings beyond this point
probe = [str(p) for p in probe]
-
logging.info('Calculating the EIS pitch angle distribution; this may take several minutes')
for probe_id in probe:
@@ -158,7 +159,6 @@ def mms_eis_pad(scopes=['0', '1', '2', '3', '4', '5'], probe='1', level='l2',
options(new_name, 'ylog', False)
options(new_name, 'zlog', True)
options(new_name, 'spec', True)
- options(new_name, 'Colormap', 'spedas')
options(new_name, 'ztitle', units_label)
options(new_name, 'ytitle', 'MMS' + str(probe_id) + ' ' + datatype_id + ' PA (deg)')
out_vars.append(new_name)
@@ -191,7 +191,6 @@ def mms_eis_pad(scopes=['0', '1', '2', '3', '4', '5'], probe='1', level='l2',
options(new_name, 'ylog', False)
options(new_name, 'zlog', True)
options(new_name, 'spec', True)
- options(new_name, 'Colormap', 'spedas')
options(new_name, 'ztitle', units_label)
options(new_name, 'ytitle', 'MMS' + str(probe_id) + ' ' + datatype_id + ' PA')
options(new_name, 'ysubtitle', '[deg]')
diff --git a/pyspedas/mms/eis/mms_eis_pad_spinavg.py b/pyspedas/mms/eis/mms_eis_pad_spinavg.py
index b6a5f213..e4fb8de0 100644
--- a/pyspedas/mms/eis/mms_eis_pad_spinavg.py
+++ b/pyspedas/mms/eis/mms_eis_pad_spinavg.py
@@ -1,4 +1,3 @@
-
import logging
import warnings
import numpy as np
@@ -16,6 +15,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_eis_pad_spinavg(scopes=['0','1','2','3','4','5'], probe='1',
data_rate='srvy', level='l2', datatype='extof', data_units='flux',
species='proton', energy=[55, 800], size_pabin=15, suffix=''):
@@ -54,7 +54,8 @@ def mms_eis_pad_spinavg(scopes=['0','1','2','3','4','5'], probe='1',
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ --------
Name of tplot variables created.
"""
@@ -111,10 +112,9 @@ def mms_eis_pad_spinavg(scopes=['0','1','2','3','4','5'], probe='1',
options(newname, 'ylog', False)
options(newname, 'zlog', True)
options(newname, 'spec', True)
- options(newname, 'Colormap', 'spedas')
options(newname, 'ztitle', units_label)
options(newname, 'ytitle', 'MMS' + str(probe) + ' ' + datatype + ' spin PAD')
options(newname, 'ysubtitle', '[deg]')
out_vars.append(newname)
- return out_vars
\ No newline at end of file
+ return out_vars
diff --git a/pyspedas/mms/eis/mms_eis_set_metadata.py b/pyspedas/mms/eis/mms_eis_set_metadata.py
index 7aeb2986..8ba6f924 100644
--- a/pyspedas/mms/eis/mms_eis_set_metadata.py
+++ b/pyspedas/mms/eis/mms_eis_set_metadata.py
@@ -1,6 +1,7 @@
from pyspedas import tnames
from pytplot import options
+
def mms_eis_set_metadata(tplotnames, data_rate='srvy', datatype='extof', suffix=''):
"""
This function updates the metadata for the EIS data products
@@ -31,4 +32,4 @@ def mms_eis_set_metadata(tplotnames, data_rate='srvy', datatype='extof', suffix=
options(tnames('*_extof_helium_flux_omni*'), 'x_interp', True)
options(tnames('*_extof_helium_flux_omni*'), 'y_interp', True)
options(tnames('*_extof_oxygen_flux_omni*'), 'x_interp', True)
- options(tnames('*_extof_oxygen_flux_omni*'), 'y_interp', True)
\ No newline at end of file
+ options(tnames('*_extof_oxygen_flux_omni*'), 'y_interp', True)
diff --git a/pyspedas/mms/eis/mms_eis_spec_combine_sc.py b/pyspedas/mms/eis/mms_eis_spec_combine_sc.py
index 28c0c44e..7267d51b 100644
--- a/pyspedas/mms/eis/mms_eis_spec_combine_sc.py
+++ b/pyspedas/mms/eis/mms_eis_spec_combine_sc.py
@@ -1,3 +1,4 @@
+import logging
import numpy as np
# use nanmean from bottleneck if it's installed, otherwise use the numpy one
# bottleneck nanmean is ~2.5x faster
@@ -6,14 +7,14 @@
nanmean = bn.nanmean
except ImportError:
nanmean = np.nanmean
-from pytplot import get_data, store_data, options
-from ...utilities.tnames import tnames
+from pytplot import get_data, store_data, options, tnames
+
def mms_eis_spec_combine_sc(
species='proton', data_units='flux', datatype='extof', data_rate='srvy',
level='l2', suffix='',
):
- '''
+ """
Combines omni-directional energy spectrogram variable from EIS on multiple
MMS spacecraft.
@@ -38,9 +39,10 @@ def mms_eis_spec_combine_sc(
species for calculation, e.g., proton, oxygen, alpha or electron
(default: 'proton')
- Returns:
+ Returns
+ --------
Name of tplot variables created.
- '''
+ """
## Thoughts for extensions:
## - Ensure arguments passed to modules are lowercase
@@ -77,7 +79,7 @@ def mms_eis_spec_combine_sc(
if probes:
probe_string = probes[0]
else:
- print('No probes found from eis_sc_check tnames.')
+ logging.error('No probes found from eis_sc_check tnames.')
return
allmms_prefix = 'mmsx_epd_eis_' + data_rate + '_' + level + '_' + dtype + '_'
@@ -87,7 +89,7 @@ def mms_eis_spec_combine_sc(
omni_vars = tnames('mms?_epd_eis_'+data_rate+'_'+level+'_'+dtype+'_'+_species+'_'+data_units+'_omni'+suffix)
if not omni_vars:
- print('No EIS '+dtype+'data loaded!')
+ logging.error('No EIS '+dtype+' data loaded!')
return
time_size = np.zeros(len(probes))
@@ -147,7 +149,6 @@ def mms_eis_spec_combine_sc(
options(new_name, 'ylog', True)
options(new_name, 'zlog', True)
options(new_name, 'spec', True)
- options(new_name, 'Colormap', 'spedas')
options(new_name, 'ztitle', units_label)
options(new_name, 'ytitle', ' \\ '.join(['mms'+probe_string, _species.upper(), 'Energy [keV]']))
out_vars.append(new_name)
@@ -155,7 +156,7 @@ def mms_eis_spec_combine_sc(
# Spin-average the data
spin_nums = get_data(prefix+'spin'+suffix)
if spin_nums is None:
- print('Error: Could not find EIS spin variable -- now ending procedure.')
+ logging.error('Could not find EIS spin variable -- now ending procedure.')
return
# find where the spin starts
diff --git a/pyspedas/mms/eis/mms_eis_spin_avg.py b/pyspedas/mms/eis/mms_eis_spin_avg.py
index 0f22eefd..518eb0f9 100644
--- a/pyspedas/mms/eis/mms_eis_spin_avg.py
+++ b/pyspedas/mms/eis/mms_eis_spin_avg.py
@@ -1,4 +1,3 @@
-
import logging
import warnings
import numpy as np
@@ -16,6 +15,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_eis_spin_avg(probe='1', species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix=''):
"""
This function will spin-average the EIS spectrograms, and is automatically called from mms_load_eis
@@ -43,7 +43,8 @@ def mms_eis_spin_avg(probe='1', species='proton', data_units='flux', datatype='e
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ --------
List of tplot variables created.
"""
prefix = 'mms' + probe + '_epd_eis_' + data_rate + '_' + level + '_'
@@ -100,9 +101,8 @@ def mms_eis_spin_avg(probe='1', species='proton', data_units='flux', datatype='e
options(this_scope + '_spin', 'spec', True)
options(this_scope + '_spin', 'ylog', True)
options(this_scope + '_spin', 'zlog', True)
- options(this_scope + '_spin', 'Colormap', 'spedas')
out_vars.append(this_scope + '_spin')
return out_vars
else:
logging.error('Error, problem finding EIS spin variable to calculate spin-averages')
- return None
\ No newline at end of file
+ return
diff --git a/pyspedas/mms/feeps/feeps.py b/pyspedas/mms/feeps/feeps.py
index f176ead4..8cf457ff 100644
--- a/pyspedas/mms/feeps/feeps.py
+++ b/pyspedas/mms/feeps/feeps.py
@@ -9,6 +9,8 @@
from pyspedas.mms.feeps.mms_feeps_spin_avg import mms_feeps_spin_avg
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
+from pytplot import time_clip as tclip
+
@print_vars
def mms_load_feeps(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy',
@@ -16,12 +18,12 @@ def mms_load_feeps(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='sr
no_update=False, available=False, notplot=False, no_flatfield_corrections=False, data_units=['count_rate', 'intensity'],
latest_version=False, major_version=False, min_version=None, cdf_version=None, spdf=False, always_prompt=False):
"""
- This function loads FEEPS data into tplot variables
+ Load data from the Fly's Eye Energetic Particle Sensor (FEEPS)
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -63,11 +65,11 @@ def mms_load_feeps(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='sr
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -87,21 +89,29 @@ def mms_load_feeps(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='sr
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ ---------
List of tplot variables created.
"""
+
+ # as of 3 July 2023, there's a mixture of v7.x.x and v6.x.x files at the SDC
+ # these files aren't compatible, so we need to only load the latest major version
+ # to avoid crashes (unless otherwise specified)
+ if not latest_version and not major_version and min_version is None and cdf_version is None:
+ major_version = True
+
tvars = mms_load_data(trange=trange, notplot=notplot, probe=probe, data_rate=data_rate, level=level, instrument='feeps',
datatype=datatype, varformat=varformat, varnames=varnames, get_support_data=get_support_data, suffix=suffix,
- time_clip=time_clip, no_update=no_update, available=available, latest_version=latest_version,
+ no_update=no_update, available=available, latest_version=latest_version,
major_version=major_version, min_version=min_version, cdf_version=cdf_version, spdf=spdf, always_prompt=always_prompt)
- if tvars == [] or available or notplot or CONFIG['download_only']:
+ if tvars is None or tvars == [] or available or notplot or CONFIG['download_only']:
return tvars
probes = probe if isinstance(probe, list) else [probe]
@@ -121,19 +131,33 @@ def mms_load_feeps(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='sr
for lvl in levels:
for drate in data_rates:
for datatype in datatypes:
- mms_feeps_remove_bad_data(trange=trange, probe=probe, data_rate=drate, datatype =datatype, level=lvl, suffix=suffix)
+ mms_feeps_remove_bad_data(trange=trange, probe=probe, data_rate=drate, datatype=datatype, level=lvl, suffix=suffix)
+
+ for data_unit in data_units:
+ eyes = mms_feeps_active_eyes(trange, probe, drate, datatype, lvl)
+
+ split_vars = mms_feeps_split_integral_ch(data_unit, datatype, probe, suffix=suffix, data_rate=drate, level=lvl, sensor_eyes=eyes)
+
+ sun_removed_vars = mms_feeps_remove_sun(eyes, trange, probe=probe, datatype=datatype, data_units=data_unit, data_rate=drate, level=lvl, suffix=suffix)
+
+ omni_vars = mms_feeps_omni(eyes, probe=probe, datatype=datatype, data_units=data_unit, data_rate=drate, level=lvl, suffix=suffix)
+
+ if split_vars is not None:
+ tvars = tvars + split_vars
- for data_unit in data_units:
- eyes = mms_feeps_active_eyes(trange, probe, drate, datatype, lvl)
+ if sun_removed_vars is not None:
+ tvars = tvars + sun_removed_vars
- split_vars = mms_feeps_split_integral_ch(data_unit, datatype, probe, suffix=suffix, data_rate=drate, level=lvl, sensor_eyes=eyes)
+ if omni_vars is not None:
+ tvars = tvars + omni_vars
- sun_removed_vars = mms_feeps_remove_sun(eyes, trange, probe=probe, datatype=datatype, data_units=data_unit, data_rate=drate, level=lvl, suffix=suffix)
+ spin_avg_vars = mms_feeps_spin_avg(probe=probe, data_units=data_unit, datatype=datatype, data_rate=drate, level=lvl, suffix=suffix)
- omni_vars = mms_feeps_omni(eyes, probe=probe, datatype=datatype, data_units=data_unit, data_rate=drate, level=lvl, suffix=suffix)
+ if spin_avg_vars is not None:
+ tvars.append(spin_avg_vars)
- tvars = tvars + split_vars + sun_removed_vars + omni_vars
-
- tvars.append(mms_feeps_spin_avg(probe=probe, data_units=data_unit, datatype=datatype, data_rate=drate, level=lvl, suffix=suffix))
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
return tvars
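
A note on the None guards introduced in the hunk above: the old single-line concatenation raised a TypeError whenever one post-processing step returned None. A minimal sketch of the accumulation pattern, with a hypothetical helper name:

    def extend_if_any(accumulator, new_vars):
        # Append a step's output only when the step actually produced variables;
        # list results are concatenated, a single name is appended.
        if new_vars is not None:
            accumulator += new_vars if isinstance(new_vars, list) else [new_vars]
        return accumulator

    tvars = extend_if_any([], ['mms1_var_a', 'mms1_var_b'])  # list result
    tvars = extend_if_any(tvars, None)                       # failed step: no-op
    tvars = extend_if_any(tvars, 'mms1_var_spin')            # single-name result
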
diff --git a/pyspedas/mms/feeps/mms_feeps_active_eyes.py b/pyspedas/mms/feeps/mms_feeps_active_eyes.py
index 27fc4575..848e718d 100644
--- a/pyspedas/mms/feeps/mms_feeps_active_eyes.py
+++ b/pyspedas/mms/feeps/mms_feeps_active_eyes.py
@@ -1,5 +1,7 @@
+import numpy as np
from pyspedas import time_double
+
def mms_feeps_active_eyes(trange, probe, data_rate, species, level):
"""
This function returns the FEEPS active eyes, based on date/probe/species/rate
@@ -69,6 +71,8 @@ def mms_feeps_active_eyes(trange, probe, data_rate, species, level):
if isinstance(trange[0], str):
start_time = time_double(trange[0])
+ elif isinstance(trange[0], np.datetime64):
+ start_time = np.int64(trange[0]) / 1e9
else:
start_time = trange[0]
@@ -90,5 +94,3 @@ def mms_feeps_active_eyes(trange, probe, data_rate, species, level):
return {'top': [5, 11, 12], 'bottom': []}
return sensors
-
-
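
The np.datetime64 branch added above assumes nanosecond-precision timestamps, so the raw integer value is nanoseconds since the Unix epoch. A quick sketch of that conversion, with a precision-agnostic alternative:

    import numpy as np

    t = np.datetime64('2015-10-16T13:06:00', 'ns')
    unix_seconds = np.int64(t) / 1e9                 # ns since epoch -> seconds
    # equivalent for any datetime64 precision:
    unix_seconds_alt = (t - np.datetime64('1970-01-01T00:00:00', 'ns')) / np.timedelta64(1, 's')
    assert unix_seconds == unix_seconds_alt
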
diff --git a/pyspedas/mms/feeps/mms_feeps_correct_energies.py b/pyspedas/mms/feeps/mms_feeps_correct_energies.py
index 652a74c2..7606bfdb 100644
--- a/pyspedas/mms/feeps/mms_feeps_correct_energies.py
+++ b/pyspedas/mms/feeps/mms_feeps_correct_energies.py
@@ -1,13 +1,15 @@
-from pytplot import get_data, store_data
+from pytplot import get, store
from .mms_feeps_energy_table import mms_feeps_energy_table
from pyspedas import tnames
+
def mms_feeps_correct_energies(probes, data_rate, level='l2', suffix=''):
"""
This function modifies the energy table in FEEPS spectra (intensity, count_rate, counts) variables
using the function: mms_feeps_energy_table (which is s/c, sensor head and sensor ID dependent)
- Parameters:
+ Parameters
+ -------------
probes: list of str
list of probes #, e.g., '4' for MMS4
@@ -20,7 +22,8 @@ def mms_feeps_correct_energies(probes, data_rate, level='l2', suffix=''):
suffix: str
suffix of the loaded data
- Notes:
+ Notes
+ -------
BAD EYES are replaced by NaNs
"""
types = ['top', 'bottom']
@@ -43,7 +46,7 @@ def mms_feeps_correct_energies(probes, data_rate, level='l2', suffix=''):
else:
var_name = var_name[0]
- var_data = get_data(var_name)
+ var_data = get(var_name)
if var_data is not None:
times, data, energies = var_data
else:
@@ -52,6 +55,6 @@ def mms_feeps_correct_energies(probes, data_rate, level='l2', suffix=''):
energy_map = mms_feeps_energy_table(probe, sensor_type[0:3], sensor)
try:
- store_data(var_name, data={'x': times, 'y': data, 'v': energy_map})
+ store(var_name, data={'x': times, 'y': data, 'v': energy_map})
except:
- continue
\ No newline at end of file
+ continue
diff --git a/pyspedas/mms/feeps/mms_feeps_energy_table.py b/pyspedas/mms/feeps/mms_feeps_energy_table.py
index 75110ea6..ab4790fa 100644
--- a/pyspedas/mms/feeps/mms_feeps_energy_table.py
+++ b/pyspedas/mms/feeps/mms_feeps_energy_table.py
@@ -1,5 +1,6 @@
import numpy as np
+
def mms_feeps_energy_table(probe, eye, sensor_id):
"""
This function returns the energy table based on
@@ -9,7 +10,8 @@ def mms_feeps_energy_table(probe, eye, sensor_id):
from Drew Turner, 1/19/2017
- Parameters:
+ Parameters
+ -------------
probe: str
probe #, e.g., '4' for MMS4
@@ -19,10 +21,12 @@ def mms_feeps_energy_table(probe, eye, sensor_id):
sensor_id: int
sensor ID
- Returns:
+ Returns
+ --------
Energy table
- Notes:
+ Notes
+ --------
BAD EYES are replaced by NaNs
- different original energy tables are used depending on if the sensor head is 6-8 (ions) or not (electrons)
@@ -51,4 +55,4 @@ def mms_feeps_energy_table(probe, eye, sensor_id):
mms_energies = [33.200000, 51.900000, 70.600000, 89.400000, 107.10000, 125.20000, 146.50000, 171.30000,
200.20000, 234.00000, 273.40000, 319.40000, 373.20000, 436.00000, 509.20000, 575.80000]
- return [energy+ table['mms'+probe+'-'+eye][sensor_id-1] for energy in mms_energies]
\ No newline at end of file
+ return [energy + table['mms'+probe+'-'+eye][sensor_id-1] for energy in mms_energies]
diff --git a/pyspedas/mms/feeps/mms_feeps_flat_field_corrections.py b/pyspedas/mms/feeps/mms_feeps_flat_field_corrections.py
index e590ab97..dfcb5dab 100644
--- a/pyspedas/mms/feeps/mms_feeps_flat_field_corrections.py
+++ b/pyspedas/mms/feeps/mms_feeps_flat_field_corrections.py
@@ -1,8 +1,8 @@
import numpy as np
-
-from pytplot import get_data, store_data
+from pytplot import get, store
from pyspedas import tnames
+
def mms_feeps_flat_field_corrections(probes = ['1', '2', '3', '4'], data_rate = 'brst', suffix = ''):
"""
Apply flat field correction factors to FEEPS ion/electron data;
@@ -12,7 +12,8 @@ def mms_feeps_flat_field_corrections(probes = ['1', '2', '3', '4'], data_rate =
from Drew Turner, 1/19/2017
- Parameters:
+ Parameters
+ ---------------
probes: list of str
list of probes #, e.g., '4' for MMS4
@@ -22,7 +23,8 @@ def mms_feeps_flat_field_corrections(probes = ['1', '2', '3', '4'], data_rate =
suffix: str
suffix of the loaded data
- Notes:
+ Notes
+ ---------------
From Drew Turner, 1/18/17:
Here are the correction factors that we need to apply to the current
ION counts/rates/fluxes in the CDF files.
@@ -118,35 +120,32 @@ def mms_feeps_flat_field_corrections(probes = ['1', '2', '3', '4'], data_rate =
i_var = tnames('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+species_id+'_'+sensor_type+'_intensity_sensorid_'+sensor_id+suffix)
c_var = tnames('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+species_id+'_'+sensor_type+'_counts_sensorid_'+sensor_id+suffix)
- if cr_var == []:
+ if not cr_var:
count_rate = None
else:
- count_rate = get_data(cr_var[0])
+ count_rate = get(cr_var[0])
if count_rate is not None:
cr_times, cr_data, cr_energies = count_rate
if np.isnan(cr_energies).all():
continue
- store_data(cr_var[0], data={'x': cr_times, 'y': cr_data*correction, 'v': cr_energies})
+ store(cr_var[0], data={'x': cr_times, 'y': cr_data*correction, 'v': cr_energies})
- if i_var == []:
+ if not i_var:
intensity = None
else:
- intensity = get_data(i_var[0])
+ intensity = get(i_var[0])
if intensity is not None:
i_times, i_data, i_energies = intensity
if np.isnan(i_energies).all():
continue
- store_data(i_var[0], data={'x': i_times, 'y': i_data*correction, 'v': i_energies})
+ store(i_var[0], data={'x': i_times, 'y': i_data*correction, 'v': i_energies})
- if c_var == []:
+ if not c_var:
counts = None
else:
- counts = get_data(c_var[0])
+ counts = get(c_var[0])
if counts is not None:
c_times, c_data, c_energies = counts
if np.isnan(c_energies).all():
continue
- store_data(c_var[0], data={'x': c_times, 'y': c_data*correction, 'v': c_energies})
-
-
-
+ store(c_var[0], data={'x': c_times, 'y': c_data*correction, 'v': c_energies})
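
The correction itself is a scalar multiply over the whole (time, energy) array, skipped for eyes whose energy tables are NaN. A self-contained sketch; the 1.09 factor is illustrative, not one of the published FEEPS values:

    import numpy as np

    def apply_flat_field(data, energies, correction):
        # Skip bad eyes, whose energy tables are entirely NaN
        if np.isnan(energies).all():
            return None
        return data * correction

    spec = np.ones((5, 16))
    energies = np.linspace(33.2, 575.8, 16)
    corrected = apply_flat_field(spec, energies, 1.09)
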
diff --git a/pyspedas/mms/feeps/mms_feeps_getgyrophase.py b/pyspedas/mms/feeps/mms_feeps_getgyrophase.py
index dfc963b7..49609193 100644
--- a/pyspedas/mms/feeps/mms_feeps_getgyrophase.py
+++ b/pyspedas/mms/feeps/mms_feeps_getgyrophase.py
@@ -1,7 +1,9 @@
+import logging
import numpy as np
from pyspedas import mms, tinterpol, time_double
from pyspedas.mms.feeps.mms_feeps_active_eyes import mms_feeps_active_eyes
-from pytplot import get_data, store_data, options
+from pytplot import get, store, options
+
def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], probe='2', data_rate='brst', level='l2', datatype='electron'):
"""
@@ -15,11 +17,11 @@ def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], prob
"""
mec_vars = mms.mec(trange=trange, probe=probe, data_rate=data_rate)
if mec_vars is None:
- print('Problem loading MEC data for calculating FEEPS gyrophase angles')
+ logging.error('Problem loading MEC data for calculating FEEPS gyrophase angles')
- qeci2sm = get_data('mms'+probe+'_mec_quat_eci_to_sm')
- qeci2bcs = get_data('mms'+probe+'_mec_quat_eci_to_bcs')
- rsun = get_data('mms'+probe+'_mec_r_sun_de421_eci')
+ qeci2sm = get('mms'+probe+'_mec_quat_eci_to_sm', units=False)
+ qeci2bcs = get('mms'+probe+'_mec_quat_eci_to_bcs', units=False)
+ rsun = get('mms'+probe+'_mec_r_sun_de421_eci', units=False)
rsunbcs = np.zeros((len(rsun.times), 3))
rduskbcs = np.zeros((len(rsun.times), 3))
@@ -49,13 +51,13 @@ def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], prob
# Now convert to BCS:
rduskbcs[i, :] = np.array([R[0,0]*rduskeci[0] + R[1,0]*rduskeci[1] + R[2,0]*rduskeci[2], R[0,1]*rduskeci[0] + R[1,1]*rduskeci[1] + R[2,1]*rduskeci[2], R[0,2]*rduskeci[0] + R[1,2]*rduskeci[1] + R[2,2]*rduskeci[2]])
- saved = store_data('mms'+probe+'_mec_r_sun_bcs', data = {'x': rsun.times, 'y': rsunbcs})
+ saved = store('mms'+probe+'_mec_r_sun_bcs', data = {'x': rsun.times, 'y': rsunbcs})
if not saved:
- print('Problem saving r_sun_bcs')
+ logging.error('Problem saving r_sun_bcs')
- saved = store_data('mms'+probe+'_mec_r_dusk_bcs', data = {'x': rsun.times, 'y': rduskbcs})
+ saved = store('mms'+probe+'_mec_r_dusk_bcs', data = {'x': rsun.times, 'y': rduskbcs})
if not saved:
- print('Problem saving r_dusk_bcs')
+ logging.error('Problem saving r_dusk_bcs')
# Rotation matrices for FEEPS coord system (FCS) into body coordinate system (BCS):
Ttop = np.array([[1./np.sqrt(2.), -1./np.sqrt(2.), 0], [1./np.sqrt(2.), 1./np.sqrt(2.), 0], [0, 0, 1]]).T
@@ -156,12 +158,12 @@ def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], prob
fgm_vars = mms.fgm(trange=[time_double(trange[0])-600, time_double(trange[1])+600], probe=probe, data_rate='srvy')
if fgm_vars is None:
- print('Problem loading FGM vars for calculating FEEPS gyrophase angles')
+ logging.error('Problem loading FGM vars for calculating FEEPS gyrophase angles')
# interpolate the FGM var to the MEC var timestamps
tinterpol('mms'+probe+'_fgm_b_bcs_srvy_l2_bvec', 'mms'+probe+'_mec_r_sun_bcs', newname='mms'+probe+'_fgm_b_bcs_srvy_l2_bvec_int')
- B = get_data('mms'+probe+'_fgm_b_bcs_srvy_l2_bvec_int')
+ B = get('mms'+probe+'_fgm_b_bcs_srvy_l2_bvec_int')
# Now calculate gyrophase
# Telescope vectors perp to B:
@@ -204,6 +206,9 @@ def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], prob
for j in range(24):
th1 = np.arccos(np.nansum(Tperp[i,:,j] * Sperp)/(np.sqrt(np.nansum(Tperp[i,:,j]**2))*np.sqrt(np.nansum(Sperp**2))))
th2 = np.arccos(np.nansum(Tperp[i,:,j] * Dperp)/(np.sqrt(np.nansum(Tperp[i,:,j]**2))*np.sqrt(np.nansum(Dperp**2))))
+ # strip the units
+ th1 = th1.value
+ th2 = th2.value
if th1 <= np.pi/2.0 and th2 < np.pi/2:
phi[i, j] = 2*np.pi - th1
if th1 < np.pi/2.0 and th2 >= np.pi/2.0:
@@ -213,9 +218,9 @@ def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], prob
if th1 >= np.pi/2.0 and th2 > np.pi/2.0:
phi[i, j] = th1
- saved = store_data('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase', data={'x': rsun.times, 'y': phi*180./np.pi})
+ saved = store('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase', data={'x': rsun.times, 'y': phi*180./np.pi})
if not saved:
- print('Problem saving gyrophase angles')
+ logging.error('Problem saving gyrophase angles')
return
options('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase', 'yrange', [0, 360.0])
@@ -224,10 +229,10 @@ def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], prob
eyes = mms_feeps_active_eyes(trange, probe, data_rate, datatype, level)
sensor_types = ['top', 'bottom']
- feepst = get_data('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_spinsectnum')
+ feepst = get('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_spinsectnum')
indt = np.zeros(len(feepst.times), dtype='int32')
- gpd = get_data('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase')
+ gpd = get('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase')
for i in range(len(feepst.times)):
indt[i] = np.argwhere(np.abs(gpd.times - feepst.times[i]) == np.min(np.abs(gpd.times - feepst.times[i]))).flatten()[0]
@@ -240,7 +245,7 @@ def mms_feeps_getgyrophase(trange=['2017-07-11/22:30', '2017-07-11/22:35'], prob
for i in range(len(iT)):
gp_data[:, i] = gpd.y[indt, iT[i]]
- saved = store_data('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase', data = {'x': gpd.times[indt], 'y': gp_data})
+ saved = store('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase', data = {'x': gpd.times[indt], 'y': gp_data})
if saved:
options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase', 'yrange', [0.0, 360.0])
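
th1 and th2 in the loop above are each the angle between a telescope's B-perpendicular vector and the sun or dusk direction, i.e. the arccos of a normalized dot product. The same computation in isolation:

    import numpy as np

    def angle_between(v, w):
        # Angle (radians) between v and w, ignoring NaN components
        return np.arccos(np.nansum(v * w) /
                         (np.sqrt(np.nansum(v**2)) * np.sqrt(np.nansum(w**2))))

    th = angle_between(np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0]))
    # perpendicular vectors -> th == pi/2
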
diff --git a/pyspedas/mms/feeps/mms_feeps_gpd.py b/pyspedas/mms/feeps/mms_feeps_gpd.py
index 050fc0cb..9bcfffaf 100644
--- a/pyspedas/mms/feeps/mms_feeps_gpd.py
+++ b/pyspedas/mms/feeps/mms_feeps_gpd.py
@@ -1,9 +1,18 @@
+import logging
import numpy as np
import pyspedas
-from pytplot import get_data, store_data, options
+from pytplot import get, store, options
from pyspedas.mms.feeps.mms_feeps_active_eyes import mms_feeps_active_eyes
from pyspedas.mms.feeps.mms_feeps_getgyrophase import mms_feeps_getgyrophase
+# use nanmean from bottleneck if it's installed, otherwise use the numpy one
+# bottleneck nanmean is ~2.5x faster
+try:
+ import bottleneck as bn
+ nanmean = bn.nanmean
+except ImportError:
+ nanmean = np.nanmean
+
def mms_feeps_gpd(trange=['2017-07-11/22:30', '2017-07-11/22:35'],
probe='2',
@@ -57,7 +66,7 @@ def mms_feeps_gpd(trange=['2017-07-11/22:30', '2017-07-11/22:35'],
feeps_data = pyspedas.mms.feeps(trange=trange, data_rate=data_rate, probe=probe, level=level)
if len(feeps_data) == 0:
- print('Problem loading FEEPS data for this time range.')
+ logging.error('Problem loading FEEPS data for this time range.')
return
# Account for angular response (finite field of view) of instruments
@@ -73,10 +82,10 @@ def mms_feeps_gpd(trange=['2017-07-11/22:30', '2017-07-11/22:35'],
# get the gyrophase angles
# calculate the gyro phase angles from the magnetic field data
gyro_vars = mms_feeps_getgyrophase(trange=trange, probe=probe, data_rate=data_rate, level=level, datatype=datatype)
- gyro_data = get_data('mms' + str(probe) + '_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase')
+ gyro_data = get('mms' + str(probe) + '_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase')
if gyro_data is None or gyro_vars is None:
- print('Problem calculating gyrophase angles.')
+ logging.error('Problem calculating gyrophase angles.')
return
eyes = mms_feeps_active_eyes(trange, probe, data_rate, datatype, level)
@@ -119,17 +128,17 @@ def mms_feeps_gpd(trange=['2017-07-11/22:30', '2017-07-11/22:35'],
for isen in range(len(particle_idxs)): # loop through sensors
# get the data
var_name = 'mms' + str(probe) + '_epd_feeps_' + data_rate + '_' + level + '_' + datatype + '_' + sensor_type + '_' + data_units + '_sensorid_' + str(particle_idxs[isen]+1) + '_clean_sun_removed'
- data = get_data(var_name)
+ data = get(var_name)
if data is None:
- print('Data not found: ' + var_name)
+ logging.error('Data not found: ' + var_name)
continue
data.y[data.y == 0.0] = np.nan # remove any 0s before averaging
# Energy indices to use:
indx = np.argwhere((data.v <= energy[1]) & (data.v >= energy[0]))
if len(indx) == 0:
- print('Energy range selected is not covered by the detector for FEEPS ' + datatype + ' data')
+ logging.error('Energy range selected is not covered by the detector for FEEPS ' + datatype + ' data')
continue
- dflux[:, pa_map[isen]] = np.nanmean(data.y[:, indx], axis=1).flatten()
+ dflux[:, pa_map[isen]] = nanmean(data.y[:, indx], axis=1).flatten()
dpa[:, pa_map[isen]] = gyro_data.y[:, pa_map[isen]].flatten()
# we need to replace the 0.0s left in after populating dpa with NaNs; these
@@ -143,9 +152,14 @@ def mms_feeps_gpd(trange=['2017-07-11/22:30', '2017-07-11/22:35'],
# Now loop through PA bins and time, find the telescopes where there is data in those bins and average it up!
for it in range(len(dpa[:, 0])):
for ipa in range(int(n_bins)):
- ind = np.argwhere((dpa[it, :] + dAngResp >= gyro_centers[ipa]-delta_gyro) & (dpa[it, :] - dAngResp < gyro_centers[ipa]+delta_gyro))
- if len(ind) > 0:
- gyro_flux[it, ipa] = np.nanmean(dflux[it, ind], axis=0).flatten()
+ ind = np.argwhere((dpa[it, :] + dAngResp >= gyro_centers[ipa]-delta_gyro) & (dpa[it, :] - dAngResp < gyro_centers[ipa]+delta_gyro)).flatten()
+ if ind.size != 0:
+ if len(ind) > 1:
+ gyro_flux[it, ipa] = nanmean(dflux[it, ind])
+ else:
+ gyro_flux[it, ipa] = dflux[it, ind[0]]
+ #if len(ind) > 0:
+ # gyro_flux[it, ipa] = np.nanmean(dflux[it, ind], axis=0).flatten()
# fill any missed bins with NAN
gyro_flux[gyro_flux == 0.0] = np.nan
@@ -154,7 +168,7 @@ def mms_feeps_gpd(trange=['2017-07-11/22:30', '2017-07-11/22:35'],
new_name = 'mms' + str(probe) + '_epd_feeps_' + data_rate + '_' + level + '_' + datatype + '_' + data_units + '_' + en_range_string + '_gpd'
- saved = store_data(new_name, data={'x': gyro_data.times, 'y': gyro_flux, 'v': gyro_centers})
+ saved = store(new_name, data={'x': gyro_data.times, 'y': gyro_flux, 'v': gyro_centers})
if saved:
options(new_name, 'spec', True)
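
Two details in this file are worth a standalone sketch: the optional bottleneck import, and the bin-averaging fix, which only calls nanmean when more than one telescope falls in a gyrophase bin (bottleneck's nanmean returns a scalar for 1-D input):

    import numpy as np

    # use nanmean from bottleneck if it's installed, otherwise the numpy one
    try:
        import bottleneck as bn
        nanmean = bn.nanmean
    except ImportError:
        nanmean = np.nanmean

    flux = np.array([1.0, np.nan, 3.0])
    ind = np.argwhere(~np.isnan(flux)).flatten()
    binned = nanmean(flux[ind]) if len(ind) > 1 else flux[ind[0]]
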
diff --git a/pyspedas/mms/feeps/mms_feeps_omni.py b/pyspedas/mms/feeps/mms_feeps_omni.py
index 08e7bc82..ff1c436a 100644
--- a/pyspedas/mms/feeps/mms_feeps_omni.py
+++ b/pyspedas/mms/feeps/mms_feeps_omni.py
@@ -1,8 +1,7 @@
-
import logging
import warnings
import numpy as np
-from pytplot import get_data, store_data, options
+from pytplot import get, store, options
# use nanmean from bottleneck if it's installed, otherwise use the numpy one
# bottleneck nanmean is ~2.5x faster
@@ -15,11 +14,13 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_feeps_omni(eyes, probe='1', datatype='electron', data_units='intensity', data_rate='srvy', level='l2', suffix=''):
"""
- This function will calculate the omni-directional FEEPS spectrograms, and is automatically called from mms_load_feeps
+ This function will calculate the omnidirectional FEEPS spectrograms, and is automatically called from mms_load_feeps
- Parameters:
+ Parameters
+ ------------
eyes: dict
Hash table containing the active sensor eyes
@@ -41,7 +42,8 @@ def mms_feeps_omni(eyes, probe='1', datatype='electron', data_units='intensity',
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ ------------
List of tplot variables created.
"""
@@ -92,7 +94,7 @@ def mms_feeps_omni(eyes, probe='1', datatype='electron', data_units='intensity',
top_sensors = eyes['top']
bot_sensors = eyes['bottom']
- tmpdata = get_data(prefix+data_rate+'_'+level+'_'+datatype+'_top_'+data_units+'_sensorid_'+str(top_sensors[0])+'_clean_sun_removed'+suffix)
+ tmpdata = get(prefix+data_rate+'_'+level+'_'+datatype+'_top_'+data_units+'_sensorid_'+str(top_sensors[0])+'_clean_sun_removed'+suffix)
if tmpdata is not None:
if level != 'sitl':
@@ -101,7 +103,7 @@ def mms_feeps_omni(eyes, probe='1', datatype='electron', data_units='intensity',
for idx, sensor in enumerate(top_sensors):
var_name = prefix+data_rate+'_'+level+'_'+datatype+'_top_'+data_units+'_sensorid_'+str(sensor)+'_clean_sun_removed'+suffix
- data = get_data(var_name)
+ data = get(var_name)
dalleyes[:, :, idx] = data[1]
try:
iE = np.where(np.abs(energies-data[2]) > en_chk*energies)
@@ -111,7 +113,7 @@ def mms_feeps_omni(eyes, probe='1', datatype='electron', data_units='intensity',
logging.warning('NaN in energy table encountered; sensor T' + str(sensor))
for idx, sensor in enumerate(bot_sensors):
var_name = prefix+data_rate+'_'+level+'_'+datatype+'_bottom_'+data_units+'_sensorid_'+str(sensor)+'_clean_sun_removed'+suffix
- data = get_data(var_name)
+ data = get(var_name)
dalleyes[:, :, idx+len(top_sensors)] = data[1]
try:
iE = np.where(np.abs(energies-data[2]) > en_chk*energies)
@@ -119,13 +121,13 @@ def mms_feeps_omni(eyes, probe='1', datatype='electron', data_units='intensity',
dalleyes[:, iE[0], idx+len(top_sensors)] = np.nan
except Warning:
logging.warning('NaN in energy table encountered; sensor B' + str(sensor))
- else: # sitl data
+ else: # sitl data
dalleyes = np.empty((len(tmpdata[0]), len(tmpdata[2]), len(top_sensors)))
dalleyes[:] = np.nan
for idx, sensor in enumerate(top_sensors):
var_name = prefix+data_rate+'_'+level+'_'+datatype+'_top_'+data_units+'_sensorid_'+str(sensor)+'_clean_sun_removed'+suffix
- data = get_data(var_name)
+ data = get(var_name)
dalleyes[:, :, idx] = data[1]
iE = np.where(np.abs(energies-data[2]) > en_chk*energies)
if iE[0].size != 0:
@@ -153,19 +155,13 @@ def mms_feeps_omni(eyes, probe='1', datatype='electron', data_units='intensity',
if probe == '4' and datatype == 'ion':
flux_omni = flux_omni*iGfact[3]
- store_data('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, data={'x': tmpdata[0], 'y': flux_omni, 'v': energies})
+ store('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, data={'x': tmpdata[0], 'y': flux_omni, 'v': energies})
options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, 'spec', True)
options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, 'ylog', True)
options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, 'zlog', True)
- options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, 'Colormap', 'spedas')
options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, 'ztitle', units_label)
options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, 'ytitle', 'MMS' + str(probe) + ' ' + datatype)
options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix, 'ysubtitle', '[keV]')
out_vars.append('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_omni'+suffix)
return out_vars
-
-
-
-
-
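
The iE check in this file flags channels whose eye-specific energies differ from the common table by more than the fractional tolerance en_chk, then NaNs them; the 10% below is an assumed value for illustration:

    import numpy as np

    en_chk = 0.10                                  # assumed tolerance
    energies = np.array([50.0, 100.0, 200.0])      # common energy table
    eye_energies = np.array([51.0, 130.0, 201.0])  # one eye's table
    iE = np.where(np.abs(energies - eye_energies) > en_chk * energies)
    # channel 1 is off by 30% -> its data would be NaN'd for this eye
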
diff --git a/pyspedas/mms/feeps/mms_feeps_pad.py b/pyspedas/mms/feeps/mms_feeps_pad.py
index 8387e7e3..e47416cb 100644
--- a/pyspedas/mms/feeps/mms_feeps_pad.py
+++ b/pyspedas/mms/feeps/mms_feeps_pad.py
@@ -1,8 +1,7 @@
-
import logging
import warnings
import numpy as np
-from pytplot import get_data, store_data, options
+from pytplot import get, store, options
from pyspedas.mms.feeps.mms_feeps_pitch_angles import mms_feeps_pitch_angles
from pyspedas.mms.feeps.mms_feeps_active_eyes import mms_feeps_active_eyes
from pyspedas.mms.feeps.mms_feeps_pad_spinavg import mms_feeps_pad_spinavg
@@ -18,6 +17,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suffix='', datatype='electron', data_units='intensity', data_rate='srvy', angles_from_bfield=False):
"""
This function will calculate pitch angle distributions using data from the MMS Fly's Eye Energetic Particle Sensor (FEEPS)
@@ -51,7 +51,8 @@ def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suf
angles_from_bfield: bool
calculate the pitch angles from the B-field data instead of reading from the CDFs
- Returns:
+ Returns
+ --------
List of tplot variables created.
"""
@@ -72,7 +73,8 @@ def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suf
elif data_units == 'counts':
units_label = '[counts/s]'
- if not isinstance(probe, str): probe=str(probe)
+ if not isinstance(probe, str):
+ probe = str(probe)
prefix = 'mms' + probe
n_pabins = 180/bin_size
@@ -81,15 +83,18 @@ def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suf
if data_rate == 'brst' and angles_from_bfield == False:
# v5.5+ = mms1_epd_feeps_srvy_l2_electron_pitch_angle
- pad_pas = get_data(prefix+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_pitch_angle'+suffix)
+ pad_pas = get(prefix+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_pitch_angle'+suffix)
if pad_pas is None:
logging.error("Error reading variable containing FEEPS pitch angles")
return
pa_times = pad_pas[0]
pa_data = pad_pas[1]
else:
- pa_var, idx_maps = mms_feeps_pitch_angles(probe=probe, level=level, data_rate=data_rate, datatype=datatype, suffix=suffix)
- pa_times, pa_data = get_data(pa_var)
+ feeps_pa_data = mms_feeps_pitch_angles(probe=probe, level=level, data_rate=data_rate, datatype=datatype, suffix=suffix)
+ if feeps_pa_data is None:
+ return
+ pa_var, idx_maps = feeps_pa_data
+ pa_times, pa_data = get(pa_var)
if pa_data is None:
logging.error("Error, couldn't find the PA variable")
@@ -129,7 +134,7 @@ def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suf
particle_idxs = [eye-1 for eye in eyes[s_type]]
for isen, sensor_num in enumerate(particle_idxs):
var_name = 'mms'+str(probe)+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+s_type+'_'+data_units+'_sensorid_'+str(sensor_num+1)+'_clean_sun_removed'+suffix
- times, data, energies = get_data(var_name)
+ times, data, energies = get(var_name)
            data[data == 0] = np.nan # remove any 0s before averaging
if np.isnan(energies[0]): # assumes all energies are NaNs if the first is
continue
@@ -154,10 +159,10 @@ def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suf
if not np.isnan(dpa[pa_idx, :][0]):
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
- ind = np.where((dpa[pa_idx, :] + dangresp >= pa_label[ipa]-delta_pa) & (dpa[pa_idx, :]-dangresp < pa_label[ipa]+delta_pa))
- if ind[0].size != 0:
- if len(ind[0]) > 1:
- pa_flux[pa_idx, ipa] = nanmean(dflux[pa_idx, ind[0]], axis=0)
+ ind = np.argwhere((dpa[pa_idx, :] + dangresp >= pa_label[ipa]-delta_pa) & (dpa[pa_idx, :]-dangresp < pa_label[ipa]+delta_pa)).flatten()
+ if ind.size != 0:
+ if len(ind) > 1:
+ pa_flux[pa_idx, ipa] = nanmean(dflux[pa_idx, ind], axis=0)
else:
pa_flux[pa_idx, ipa] = dflux[pa_idx, ind[0]]
@@ -166,11 +171,10 @@ def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suf
en_range_string = str(int(energy[0])) + '-' + str(int(energy[1])) + 'keV'
new_name = 'mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_'+ en_range_string +'_pad'+suffix
- store_data(new_name, data={'x': times, 'y': pa_flux, 'v': pa_label})
+ store(new_name, data={'x': times, 'y': pa_flux, 'v': pa_label})
options(new_name, 'ylog', False)
options(new_name, 'zlog', True)
options(new_name, 'spec', True)
- options(new_name, 'Colormap', 'spedas')
options(new_name, 'ztitle', units_label)
options(new_name, 'ytitle', 'MMS' + str(probe) + ' ' + datatype + ' PA')
options(new_name, 'ysubtitle', '[deg]')
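
The rewritten bin loop widens each telescope's pitch angle by the angular response dangresp before testing it against a bin, and flattens the argwhere result so a single match indexes cleanly. Standalone, with illustrative values:

    import numpy as np

    dangresp, delta_pa = 10.0, 8.0             # illustrative half-widths
    dpa_row = np.array([12.0, 95.0, np.nan])   # one time sample, 3 telescopes
    center = 15.0                              # one pitch-angle bin center
    ind = np.argwhere((dpa_row + dangresp >= center - delta_pa) &
                      (dpa_row - dangresp < center + delta_pa)).flatten()
    # ind == [0]: only the first telescope contributes to this bin
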
diff --git a/pyspedas/mms/feeps/mms_feeps_pad_spinavg.py b/pyspedas/mms/feeps/mms_feeps_pad_spinavg.py
index 4b8c7d48..ce2952b9 100644
--- a/pyspedas/mms/feeps/mms_feeps_pad_spinavg.py
+++ b/pyspedas/mms/feeps/mms_feeps_pad_spinavg.py
@@ -1,8 +1,7 @@
-
import warnings
import numpy as np
import scipy
-from pytplot import get_data, store_data, options
+from pytplot import get_data, store, options
# use nanmean from bottleneck if it's installed, otherwise use the numpy one
# bottleneck nanmean is ~2.5x faster
@@ -12,11 +11,13 @@
except ImportError:
nanmean = np.nanmean
+
def mms_feeps_pad_spinavg(probe='1', data_units='intensity', datatype='electron', data_rate='srvy', level='l2', suffix='', energy=[70, 600], bin_size=16.3636):
"""
This function will spin-average the FEEPS pitch angle distributions
- Parameters:
+ Parameters
+ -----------
probe: str
probe #, e.g., '4' for MMS4
@@ -41,10 +42,10 @@ def mms_feeps_pad_spinavg(probe='1', data_units='intensity', datatype='electron'
bin_size: float
size of the pitch angle bins
- Returns:
+ Returns
+ ----------
Name of tplot variable created.
"""
-
units_label = ''
if data_units == 'intensity':
units_label = '1/(cm^2-sr-s-keV)'
@@ -94,11 +95,10 @@ def mms_feeps_pad_spinavg(probe='1', data_units='intensity', datatype='electron'
current_start = spin_starts[spin_idx] + 1
# store_data(var_name + '_spin' + suffix, data={'x': spin_times, 'y': spin_avg_flux, 'v': angles})
- store_data(var_name + '_spin' + suffix, data={'x': spin_times, 'y': rebinned_data, 'v': new_bins})
+ store(var_name + '_spin' + suffix, data={'x': spin_times, 'y': rebinned_data, 'v': new_bins})
options(var_name + '_spin' + suffix, 'spec', True)
options(var_name + '_spin' + suffix, 'ylog', False)
options(var_name + '_spin' + suffix, 'zlog', True)
- options(var_name + '_spin' + suffix, 'Colormap', 'spedas')
options(var_name + '_spin' + suffix, 'ztitle', units_label)
options(var_name + '_spin' + suffix, 'ytitle', 'MMS' + str(probe) + ' ' + datatype + ' PA')
options(var_name + '_spin' + suffix, 'ysubtitle', '[deg]')
diff --git a/pyspedas/mms/feeps/mms_feeps_pitch_angles.py b/pyspedas/mms/feeps/mms_feeps_pitch_angles.py
index 1addb9c6..194b1839 100644
--- a/pyspedas/mms/feeps/mms_feeps_pitch_angles.py
+++ b/pyspedas/mms/feeps/mms_feeps_pitch_angles.py
@@ -1,18 +1,20 @@
-
+import logging
from pyspedas.mms.feeps.mms_feeps_active_eyes import mms_feeps_active_eyes
from pyspedas import mms_load_fgm
from pyspedas import data_exists
-from pytplot import get_data, store_data
+from pytplot import get, store
import numpy as np
import math
+
def mms_feeps_pitch_angles(trange=None, probe='1', level='l2', data_rate='srvy', datatype='electron', suffix=''):
"""
Generates a tplot variable containing the FEEPS pitch angles for each telescope from magnetic field data.
- Parameters:
+ Parameters
+ -----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -31,15 +33,15 @@ def mms_feeps_pitch_angles(trange=None, probe='1', level='l2', data_rate='srvy',
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ ----------
Tuple: (tplot variable created, hash table used by PAD routine)
"""
-
# get the times from the currently loaded FEEPS data
- pa_variable = get_data('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_pitch_angle'+suffix)
+ pa_variable = get('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_pitch_angle'+suffix, dt=True)
if pa_variable is None:
- print('Error reading pitch angle variable')
+ logging.error('Error reading pitch angle variable')
return
times = pa_variable[0]
@@ -47,14 +49,16 @@ def mms_feeps_pitch_angles(trange=None, probe='1', level='l2', data_rate='srvy',
if times is not None:
if trange is None:
- trange = [float(times.min()), float(times.max())]
+ times_min = (times[0] - np.datetime64('1970-01-01T00:00:00'))/np.timedelta64(1, 's')
+ times_max = (times[-1] - np.datetime64('1970-01-01T00:00:00'))/np.timedelta64(1, 's')
+ trange = [times_min, times_max]
eyes = mms_feeps_active_eyes(trange, probe, data_rate, datatype, level)
# need the B-field data
mms_load_fgm(trange=trange, probe=probe, data_rate=data_rate, varformat='*_b_bcs_*')
- btimes, Bbcs = get_data('mms'+probe+'_fgm_b_bcs_'+data_rate+'_l2')
+ btimes, Bbcs = get('mms'+probe+'_fgm_b_bcs_'+data_rate+'_l2')
idx_maps = None
@@ -177,47 +181,47 @@ def mms_feeps_pitch_angles(trange=None, probe='1', level='l2', data_rate='srvy',
Vbcs = Vb12bcs
pas[:, i] = 180./math.pi*np.arccos((Vbcs[0]*Bbcs[:, 0] + Vbcs[1]*Bbcs[:, 1] + Vbcs[2]*Bbcs[:, 2])/(np.sqrt(Vbcs[0]**2+Vbcs[1]**2+Vbcs[2]**2) * np.sqrt(Bbcs[:, 0]**2+Bbcs[:, 1]**2+Bbcs[:, 2]**2)))
- if data_rate == 'srvy':
- # the following 2 hash tables map TOP/BOTTOM telescope #s to index of the PA array created above
- top_tele_idx_map = {}
- bot_tele_idx_map = {}
- top_tele_idx_map[1] = 0
- top_tele_idx_map[2] = 1
- top_tele_idx_map[3] = 2
- top_tele_idx_map[4] = 3
- top_tele_idx_map[5] = 4
- top_tele_idx_map[9] = 5
- top_tele_idx_map[10] = 6
- top_tele_idx_map[11] = 7
- top_tele_idx_map[12] = 8
- bot_tele_idx_map[1] = 9
- bot_tele_idx_map[2] = 10
- bot_tele_idx_map[3] = 11
- bot_tele_idx_map[4] = 12
- bot_tele_idx_map[5] = 13
- bot_tele_idx_map[9] = 14
- bot_tele_idx_map[10] = 15
- bot_tele_idx_map[11] = 16
- bot_tele_idx_map[12] = 17
-
- top_idxs = []
- bot_idxs = []
-
- # PAs for only active eyes
- new_pas = np.empty([len(btimes), len(eyes['top'])+len(eyes['bottom'])]) # pitch angles for each eye at eaceh time
-
- for top_idx, top_eye in enumerate(eyes['top']):
- new_pas[:, top_idx] = pas[:, top_tele_idx_map[top_eye]]
- top_idxs.append(top_idx)
-
- for bot_idx, bot_eye in enumerate(eyes['bottom']):
- new_pas[:, bot_idx+len(eyes['top'])] = pas[:, bot_tele_idx_map[bot_eye]]
- bot_idxs.append(bot_idx+len(eyes['top']))
-
- idx_maps = {'electron-top': top_idxs, 'electron-bottom': bot_idxs}
-
- else:
- new_pas = pas
+ if data_rate == 'srvy':
+ # the following 2 hash tables map TOP/BOTTOM telescope #s to index of the PA array created above
+ top_tele_idx_map = {}
+ bot_tele_idx_map = {}
+ top_tele_idx_map[1] = 0
+ top_tele_idx_map[2] = 1
+ top_tele_idx_map[3] = 2
+ top_tele_idx_map[4] = 3
+ top_tele_idx_map[5] = 4
+ top_tele_idx_map[9] = 5
+ top_tele_idx_map[10] = 6
+ top_tele_idx_map[11] = 7
+ top_tele_idx_map[12] = 8
+ bot_tele_idx_map[1] = 9
+ bot_tele_idx_map[2] = 10
+ bot_tele_idx_map[3] = 11
+ bot_tele_idx_map[4] = 12
+ bot_tele_idx_map[5] = 13
+ bot_tele_idx_map[9] = 14
+ bot_tele_idx_map[10] = 15
+ bot_tele_idx_map[11] = 16
+ bot_tele_idx_map[12] = 17
+
+ top_idxs = []
+ bot_idxs = []
+
+ # PAs for only active eyes
+ new_pas = np.empty([len(btimes), len(eyes['top'])+len(eyes['bottom'])]) # pitch angles for each eye at each time
+
+ for top_idx, top_eye in enumerate(eyes['top']):
+ new_pas[:, top_idx] = pas[:, top_tele_idx_map[top_eye]]
+ top_idxs.append(top_idx)
+
+ for bot_idx, bot_eye in enumerate(eyes['bottom']):
+ new_pas[:, bot_idx+len(eyes['top'])] = pas[:, bot_tele_idx_map[bot_eye]]
+ bot_idxs.append(bot_idx+len(eyes['top']))
+
+ idx_maps = {'electron-top': top_idxs, 'electron-bottom': bot_idxs}
+
+ else:
+ new_pas = pas
elif datatype == 'ion':
pas = np.empty([len(btimes), 6]) # pitch angles for each eye at each time
@@ -286,18 +290,20 @@ def mms_feeps_pitch_angles(trange=None, probe='1', level='l2', data_rate='srvy',
idx_maps = {'ion-top': top_idxs, 'ion-bottom': bot_idxs}
-
outvar = 'mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_pa'+suffix
- if data_exists(outvar): # kludge for bug when the PAs were previously calculated
- return (outvar, idx_maps)
+ if data_exists(outvar): # kludge for bug when the PAs were previously calculated
+ # check if the current variable's time array matches our output
+ current_pas = get(outvar)
+ if np.array_equal(current_pas.times, btimes):
+ return outvar, idx_maps
- store_data(outvar, data={'x': btimes, 'y': new_pas})
+ store(outvar, data={'x': btimes, 'y': new_pas})
# interpolate to the PA time stamps
- outdata = get_data(outvar, xarray=True)
+ outdata = get(outvar, xarray=True)
outdata_interpolated = outdata.interp({'time': times})
- store_data(outvar, data={'x': times, 'y': outdata_interpolated.values})
+ store(outvar, data={'x': times, 'y': outdata_interpolated.values})
- return (outvar, idx_maps)
+ return outvar, idx_maps
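
The closing store/interp sequence resamples the computed pitch angles onto the FEEPS timestamps through xarray. A minimal sketch of that step (xarray's interp relies on scipy):

    import numpy as np
    import xarray as xr

    src_times = np.array(['2017-07-11T22:30:00', '2017-07-11T22:31:00'],
                         dtype='datetime64[ns]')
    pa = xr.DataArray(np.array([[0.0, 10.0], [60.0, 90.0]]),
                      dims=('time', 'telescope'), coords={'time': src_times})
    feeps_times = np.array(['2017-07-11T22:30:30'], dtype='datetime64[ns]')
    pa_on_feeps_grid = pa.interp({'time': feeps_times})  # linear by default
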
diff --git a/pyspedas/mms/feeps/mms_feeps_remove_bad_data.py b/pyspedas/mms/feeps/mms_feeps_remove_bad_data.py
index 5cd06531..87ac367a 100644
--- a/pyspedas/mms/feeps/mms_feeps_remove_bad_data.py
+++ b/pyspedas/mms/feeps/mms_feeps_remove_bad_data.py
@@ -1,5 +1,5 @@
+import logging
import numpy as np
-import datetime as dt
from pyspedas import time_string, time_double, tnames
import pytplot
@@ -8,7 +8,8 @@ def mms_feeps_remove_bad_data(probe='1', data_rate='srvy', datatype='electron',
"""
    This function removes bad eyes and bad lowest energy channels, based on data from Drew Turner
- Parameters:
+ Parameters
+ ------------
probe: str
probe #, e.g., '4' for MMS4
@@ -26,11 +27,12 @@ def mms_feeps_remove_bad_data(probe='1', data_rate='srvy', datatype='electron',
trange: list of str or list of float
Time range
- Returns:
+ Returns
+ ------------
None
"""
if trange is None:
- print('Time range required for mms_feeps_remove_bad_data.')
+ logging.error('Time range required for mms_feeps_remove_bad_data.')
return
data_rate_level = data_rate + '_' + level
@@ -63,40 +65,45 @@ def mms_feeps_remove_bad_data(probe='1', data_rate='srvy', datatype='electron',
# top electrons
for bad_var in bad_data['top']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom electrons
for bad_var in bad_data['bottom']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_counts_sensorid_'+str(bad_var)+suffix))
# top ions
for bad_var in bad_data['top']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom ions
for bad_var in bad_data['bottom']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_counts_sensorid_'+str(bad_var)+suffix))
for bad_var in bad_vars:
- if bad_var == []: continue
- bad_var_data = pytplot.get_data(bad_var[0])
+ if not bad_var:
+ continue
+ bad_var_data = pytplot.get(bad_var[0])
if bad_var_data is not None:
times, data, energies = bad_var_data
data[:] = np.nan
- pytplot.store_data(bad_var[0], data={'x': times, 'y': data, 'v': energies})
+ pytplot.store(bad_var[0], data={'x': times, 'y': data, 'v': energies})
# ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; 2. BAD LOWEST E-CHANNELS ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
# ; Next, these eyes have bad first channels (i.e., lowest energy channel, E-channel 0 in IDL indexing).
@@ -163,132 +170,147 @@ def mms_feeps_remove_bad_data(probe='1', data_rate='srvy', datatype='electron',
# top electrons
for bad_var in bad_ch0['top']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom electrons
for bad_var in bad_ch0['bottom']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_counts_sensorid_'+str(bad_var)+suffix))
# top ions
for bad_var in bad_ch0['top']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom ions
for bad_var in bad_ch0['bottom']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_counts_sensorid_'+str(bad_var)+suffix))
-
#### bottom 2 channels
# top electrons
for bad_var in bad_ch1['top']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom electrons
for bad_var in bad_ch1['bottom']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_counts_sensorid_'+str(bad_var)+suffix))
# top ions
for bad_var in bad_ch1['top']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom ions
for bad_var in bad_ch1['bottom']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_both_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_counts_sensorid_'+str(bad_var)+suffix))
-
#### bottom 3 channels
-
# top electrons
for bad_var in bad_ch2['top']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom electrons
for bad_var in bad_ch2['bottom']:
- if bad_var in [6, 7, 8]: continue # ion eyes
+ if bad_var in [6, 7, 8]:
+ continue # ion eyes
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_electron_bottom_counts_sensorid_'+str(bad_var)+suffix))
# top ions
for bad_var in bad_ch2['top']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_top_counts_sensorid_'+str(bad_var)+suffix))
# bottom ions
for bad_var in bad_ch2['bottom']:
- if bad_var not in [6, 7, 8]: continue # ion eyes
+ if bad_var not in [6, 7, 8]:
+ continue # ion eyes
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_count_rate_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_intensity_sensorid_'+str(bad_var)+suffix))
bad_vars_3_chans.append(tnames('mms'+str(probe)+'_epd_feeps_'+data_rate_level+'_ion_bottom_counts_sensorid_'+str(bad_var)+suffix))
# set the first energy channel to NaN
for bad_var in bad_vars:
- if bad_var == []: continue
- bad_var_data = pytplot.get_data(bad_var[0])
+ if not bad_var:
+ continue
+ bad_var_data = pytplot.get(bad_var[0])
if bad_var_data is not None:
times, data, energies = bad_var_data
# check if the energy table contains all nans
- if np.isnan(np.sum(energies)): continue
+ if np.isnan(np.sum(energies)):
+ continue
data[:, 0] = np.nan
- pytplot.store_data(bad_var[0], data={'x': times, 'y': data, 'v': energies})
+ pytplot.store(bad_var[0], data={'x': times, 'y': data, 'v': energies})
# set the first and second energy channels to NaN
for bad_var in bad_vars_both_chans:
- if bad_var == []: continue
- bad_var_data = pytplot.get_data(bad_var[0])
+ if not bad_var:
+ continue
+ bad_var_data = pytplot.get(bad_var[0])
if bad_var_data is not None:
times, data, energies = bad_var_data
            # check if the energy table contains all nans
- if np.isnan(np.sum(energies)): continue
+ if np.isnan(np.sum(energies)):
+ continue
data[:, 0] = np.nan
data[:, 1] = np.nan
- pytplot.store_data(bad_var[0], data={'x': times, 'y': data, 'v': energies})
+ pytplot.store(bad_var[0], data={'x': times, 'y': data, 'v': energies})
# set the bottom 3 energy channels to NaN
for bad_var in bad_vars_3_chans:
- if bad_var == []: continue
- bad_var_data = pytplot.get_data(bad_var[0])
+ if not bad_var:
+ continue
+ bad_var_data = pytplot.get(bad_var[0])
if bad_var_data is not None:
times, data, energies = bad_var_data
            # check if the energy table contains all nans
- if np.isnan(np.sum(energies)): continue
+ if np.isnan(np.sum(energies)):
+ continue
data[:, 0] = np.nan
data[:, 1] = np.nan
data[:, 2] = np.nan
- pytplot.store_data(bad_var[0], data={'x': times, 'y': data, 'v': energies})
+ pytplot.store(bad_var[0], data={'x': times, 'y': data, 'v': energies})
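
Every masking pass above follows the same template: fetch the variable, skip it if its energy table contains NaNs, NaN the affected channels, store it back. The core operation in isolation:

    import numpy as np

    data = np.ones((4, 16))                  # (time, energy) spectrogram
    energies = np.linspace(33.2, 575.8, 16)
    if not np.isnan(np.sum(energies)):       # a NaN sum marks a bad energy table
        data[:, 0] = np.nan                  # bad lowest-energy channel
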
diff --git a/pyspedas/mms/feeps/mms_feeps_remove_sun.py b/pyspedas/mms/feeps/mms_feeps_remove_sun.py
index 5643ca44..8fa03dae 100644
--- a/pyspedas/mms/feeps/mms_feeps_remove_sun.py
+++ b/pyspedas/mms/feeps/mms_feeps_remove_sun.py
@@ -1,17 +1,18 @@
-
import logging
from .mms_read_feeps_sector_masks_csv import mms_read_feeps_sector_masks_csv
-from pytplot import get_data, store_data
+from pytplot import get, store
import numpy as np
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_feeps_remove_sun(sensor_eyes, trange, probe='1', datatype='electron', data_units='intensity', data_rate='srvy', level='l2', suffix=''):
"""
Removes the sunlight contamination from FEEPS data
- Parameters:
+ Parameters
+ -----------
sensor_eyes: dict
Hash table containing the active sensor eyes
@@ -36,11 +37,18 @@ def mms_feeps_remove_sun(sensor_eyes, trange, probe='1', datatype='electron', da
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ -----------
List of tplot variables created.
"""
-
- sector_times, spin_sectors = get_data('mms'+probe+'_epd_feeps_' + data_rate + '_' + level + '_' + datatype + '_spinsectnum'+suffix)
+ spin_sector_tuple = get('mms'+probe+'_epd_feeps_' + data_rate + '_' + level + '_' + datatype + '_spinsectnum'+suffix)
+
+ if spin_sector_tuple is None:
+ logging.warning(f"Error - couldn't find the spin sector variable!!!! Cannot remove sun contamination!")
+ return
+
+ sector_times, spin_sectors = spin_sector_tuple
+
mask_sectors = mms_read_feeps_sector_masks_csv(trange=trange)
out_vars = []
@@ -50,7 +58,7 @@ def mms_feeps_remove_sun(sensor_eyes, trange, probe='1', datatype='electron', da
for sensor in top_sensors:
var_name = 'mms'+str(probe)+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_top_'+data_units+'_sensorid_'+sensor+'_clean'
- top_data_tuple = get_data(var_name+suffix)
+ top_data_tuple = get(var_name+suffix)
if top_data_tuple is None:
logging.error('skipping variable: ' + var_name)
continue
@@ -65,7 +73,7 @@ def mms_feeps_remove_sun(sensor_eyes, trange, probe='1', datatype='electron', da
top_data[this_bad_sector] = np.nan
try:
- store_data(var_name+'_sun_removed'+suffix, data={'x': times, 'y': top_data, 'v': top_energies})
+ store(var_name+'_sun_removed'+suffix, data={'x': times, 'y': top_data, 'v': top_energies})
out_vars.append(var_name+'_sun_removed'+suffix)
except Warning:
continue
@@ -74,7 +82,7 @@ def mms_feeps_remove_sun(sensor_eyes, trange, probe='1', datatype='electron', da
for sensor in bot_sensors:
var_name = 'mms'+str(probe)+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_bottom_'+data_units+'_sensorid_'+sensor+'_clean'
- bot_data_tuple = get_data(var_name+suffix)
+ bot_data_tuple = get(var_name+suffix)
if bot_data_tuple is None:
logging.error('skipping: ' + var_name)
continue
@@ -89,7 +97,7 @@ def mms_feeps_remove_sun(sensor_eyes, trange, probe='1', datatype='electron', da
bot_data[this_bad_sector] = np.nan
try:
- store_data(var_name+'_sun_removed'+suffix, data={'x': times, 'y': bot_data, 'v': bot_energies})
+ store(var_name+'_sun_removed'+suffix, data={'x': times, 'y': bot_data, 'v': bot_energies})
out_vars.append(var_name+'_sun_removed'+suffix)
except Warning:
continue
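
Sun removal NaNs every time sample whose spin sector number appears in the mask table for that eye. A reduced sketch with illustrative sector values:

    import numpy as np

    spin_sectors = np.array([0, 1, 2, 3, 0, 1, 2, 3])  # sector number per sample
    data = np.ones((8, 16))                            # (time, energy) spectrogram
    masked = [1, 2]                                    # contaminated sectors for this eye
    for bad_sector in masked:
        data[spin_sectors == bad_sector] = np.nan
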
diff --git a/pyspedas/mms/feeps/mms_feeps_spin_avg.py b/pyspedas/mms/feeps/mms_feeps_spin_avg.py
index fefb79c8..7f113754 100644
--- a/pyspedas/mms/feeps/mms_feeps_spin_avg.py
+++ b/pyspedas/mms/feeps/mms_feeps_spin_avg.py
@@ -1,6 +1,7 @@
import warnings
+import logging
import numpy as np
-from pytplot import get_data, store_data, options
+from pytplot import get, store, options
# use nanmean from bottleneck if it's installed, otherwise use the numpy one
# bottleneck nanmean is ~2.5x faster
@@ -10,11 +11,13 @@
except ImportError:
nanmean = np.nanmean
+
def mms_feeps_spin_avg(probe='1', data_units='intensity', datatype='electron', data_rate='srvy', level='l2', suffix=''):
"""
- This function will spin-average the omni-directional FEEPS energy spectra
+ This function will spin-average the omnidirectional FEEPS energy spectra
- Parameters:
+ Parameters
+ -----------
probe: str
probe #, e.g., '4' for MMS4
@@ -33,10 +36,10 @@ def mms_feeps_spin_avg(probe='1', data_units='intensity', datatype='electron', d
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ -----------
Name of tplot variable created.
"""
-
units_label = ''
if data_units == 'intensity':
units_label = '1/(cm^2-sr-s-keV)'
@@ -52,13 +55,19 @@ def mms_feeps_spin_avg(probe='1', data_units='intensity', datatype='electron', d
# get the spin sectors
# v5.5+ = mms1_epd_feeps_srvy_l1b_electron_spinsectnum
- sector_times, spin_sectors = get_data(prefix + data_rate + '_' + level + '_' + datatype + '_spinsectnum' + suffix)
+ spin_sector_tuple = get(prefix + data_rate + '_' + level + '_' + datatype + '_spinsectnum' + suffix)
+
+ if spin_sector_tuple is None:
+ logging.warning('Problem reading spin sector variable')
+ return
+
+ sector_times, spin_sectors = spin_sector_tuple
spin_starts = [spin_end + 1 for spin_end in np.where(spin_sectors[:-1] >= spin_sectors[1:])[0]]
var_name = prefix + data_rate + '_' + level + '_' + datatype + '_' + data_units + '_omni'
- times, data, energies = get_data(var_name + suffix)
+ times, data, energies = get(var_name + suffix)
spin_avg_flux = np.zeros([len(spin_starts), len(energies)])
@@ -69,12 +78,11 @@ def mms_feeps_spin_avg(probe='1', data_units='intensity', datatype='electron', d
spin_avg_flux[spin_idx-1, :] = nanmean(data[current_start:spin_starts[spin_idx]+1, :], axis=0)
current_start = spin_starts[spin_idx] + 1
- store_data(var_name + '_spin' + suffix, data={'x': times[spin_starts], 'y': spin_avg_flux, 'v': energies})
+ store(var_name + '_spin' + suffix, data={'x': times[spin_starts], 'y': spin_avg_flux, 'v': energies})
options(var_name + '_spin' + suffix, 'spec', True)
options(var_name + '_spin' + suffix, 'ylog', True)
options(var_name + '_spin' + suffix, 'zlog', True)
options(var_name + '_spin' + suffix, 'yrange', [lower_en, 600.0])
- options(var_name + '_spin' + suffix, 'Colormap', 'spedas')
options(var_name + '_spin' + suffix, 'ztitle', units_label)
options(var_name + '_spin' + suffix, 'ytitle', 'MMS' + str(probe) + ' ' + datatype)
options(var_name + '_spin' + suffix, 'ysubtitle', '[keV]')
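
Spin averaging keys off the sector counter: a new spin starts wherever the counter stops increasing, i.e. at each sawtooth wrap. The detection line in isolation:

    import numpy as np

    spin_sectors = np.array([0, 1, 2, 3, 0, 1, 2, 3, 0])
    spin_starts = [end + 1 for end in np.where(spin_sectors[:-1] >= spin_sectors[1:])[0]]
    # spin_starts == [4, 8]: the indices where a fresh spin begins
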
diff --git a/pyspedas/mms/feeps/mms_feeps_split_integral_ch.py b/pyspedas/mms/feeps/mms_feeps_split_integral_ch.py
index 6c26370c..bd481749 100644
--- a/pyspedas/mms/feeps/mms_feeps_split_integral_ch.py
+++ b/pyspedas/mms/feeps/mms_feeps_split_integral_ch.py
@@ -1,10 +1,10 @@
-
import logging
import pytplot
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_feeps_split_integral_ch(units_type, species, probe, suffix='', data_rate='srvy', level='l2', sensor_eyes=None):
"""
This function splits the last integral channel from the FEEPS spectra,
@@ -13,7 +13,8 @@ def mms_feeps_split_integral_ch(units_type, species, probe, suffix='', data_rate
[original variable]_clean - spectra with the integral channel removed
[original variable]_500keV_int - the integral channel that was removed
- Parameters:
+ Parameters
+ -----------
units_type: str
instrument datatype, e.g., 'intensity'
@@ -35,10 +36,10 @@ def mms_feeps_split_integral_ch(units_type, species, probe, suffix='', data_rate
sensor_eyes: dict
Hash table containing the active sensor eyes
- Returns:
+ Returns
+ -----------
List of tplot variables created.
"""
-
if sensor_eyes is None:
logging.error('Error: sensor_eyes not defined')
return
@@ -51,12 +52,18 @@ def mms_feeps_split_integral_ch(units_type, species, probe, suffix='', data_rate
for sensor in top_sensors:
top_name = 'mms'+str(probe)+'_epd_feeps_'+data_rate+'_'+level+'_'+species+'_top_'+units_type+'_sensorid_'+str(sensor)
- time, data, energies = pytplot.get_data(top_name+suffix)
+ data_tuple = pytplot.get(top_name+suffix)
+
+ if data_tuple is None:
+ logging.warning(f"Couldn't find the variable: {top_name+suffix}")
+ continue
+
+ time, data, energies = data_tuple
top_name_out = top_name+'_clean'+suffix
try:
- pytplot.store_data(top_name_out, data={'x': time, 'y': data[:, :-1], 'v': energies[:-1]})
- pytplot.store_data(top_name+'_500keV_int'+suffix, data={'x': time, 'y': data[:, -1]})
+ pytplot.store(top_name_out, data={'x': time, 'y': data[:, :-1], 'v': energies[:-1]})
+ pytplot.store(top_name+'_500keV_int'+suffix, data={'x': time, 'y': data[:, -1]})
out_vars.append(top_name_out)
out_vars.append(top_name+'_500keV_int'+suffix)
except Warning:
@@ -70,12 +77,12 @@ def mms_feeps_split_integral_ch(units_type, species, probe, suffix='', data_rate
for sensor in bot_sensors:
bot_name = 'mms'+str(probe)+'_epd_feeps_'+data_rate+'_'+level+'_'+species+'_bottom_'+units_type+'_sensorid_'+str(sensor)
- time, data, energies = pytplot.get_data(bot_name+suffix)
+ time, data, energies = pytplot.get(bot_name+suffix)
bot_name_out = bot_name+'_clean'+suffix
try:
- pytplot.store_data(bot_name_out, data={'x': time, 'y': data[:, :-1], 'v': energies[:-1]})
- pytplot.store_data(bot_name+'_500keV_int'+suffix, data={'x': time, 'y': data[:, -1]})
+ pytplot.store(bot_name_out, data={'x': time, 'y': data[:, :-1], 'v': energies[:-1]})
+ pytplot.store(bot_name+'_500keV_int'+suffix, data={'x': time, 'y': data[:, -1]})
out_vars.append(bot_name_out)
out_vars.append(bot_name+'_500keV_int'+suffix)
except Warning:
@@ -83,4 +90,4 @@ def mms_feeps_split_integral_ch(units_type, species, probe, suffix='', data_rate
pytplot.del_data(bot_name+suffix)
- return out_vars
\ No newline at end of file
+ return out_vars
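The split itself is plain column slicing on the (time, energy) array; a toy illustration with hypothetical shapes:

import numpy as np

data = np.random.rand(6, 4)  # 6 time samples x 4 energy channels; the last channel is the integral channel

clean = data[:, :-1]      # becomes [original variable]_clean
integral = data[:, -1]    # becomes [original variable]_500keV_int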
diff --git a/pyspedas/mms/feeps/mms_read_feeps_sector_masks_csv.py b/pyspedas/mms/feeps/mms_read_feeps_sector_masks_csv.py
index 0ab08318..5a769983 100644
--- a/pyspedas/mms/feeps/mms_read_feeps_sector_masks_csv.py
+++ b/pyspedas/mms/feeps/mms_read_feeps_sector_masks_csv.py
@@ -1,20 +1,22 @@
-
import csv
import os
import numpy as np
from pyspedas import time_double, time_string
+
def mms_read_feeps_sector_masks_csv(trange):
"""
This function returns the FEEPS sectors to mask due to sunlight contamination
- Parameters:
+ Parameters
+ -----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
- Returns:
+ Returns
+ -----------
Hash table containing the sectors to mask for each spacecraft and sensor ID
"""
@@ -27,7 +29,8 @@ def mms_read_feeps_sector_masks_csv(trange):
1506988800.0000000, # 10/3/2017
1538697600.0000000, # 10/5/2018
1642032000.0000000, # 1/13/2022
- 1651795200.0000000] # 5/6/2022
+ 1651795200.0000000, # 5/6/2022
+ 1660521600.0000000] # 8/15/2022
# find the file closest to the start time
nearest_date = dates[(np.abs(np.array(dates)-time_double(trange[0]))).argmin()]
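With the 2022-08-15 entry appended, the nearest-file selection above picks whichever table date is closest to the start of the requested time range; for example:

import numpy as np
from pyspedas import time_double

dates = [1642032000.0, 1651795200.0, 1660521600.0]  # 1/13/2022, 5/6/2022, 8/15/2022
t0 = time_double('2022-09-01')
nearest_date = dates[np.abs(np.array(dates) - t0).argmin()]  # 1660521600.0 -> the 20220815 file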
diff --git a/pyspedas/mms/feeps/sun/MMS1_FEEPS_ContaminatedSectors_20220815.csv b/pyspedas/mms/feeps/sun/MMS1_FEEPS_ContaminatedSectors_20220815.csv
new file mode 100644
index 00000000..73483cfd
--- /dev/null
+++ b/pyspedas/mms/feeps/sun/MMS1_FEEPS_ContaminatedSectors_20220815.csv
@@ -0,0 +1,64 @@
+1,0,0,0,1,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,1,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,0,0,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,0,0,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,1,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,0,1,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,1,0,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,1,1,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0
+1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0
+1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,0,1,1,0
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,1,0
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,1,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,1,1,1
+1,0,0,0,0,1,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,1,1
+1,0,0,0,1,1,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,1,1
+1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,0,0,1,1
+1,0,0,0,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,1,0,0,1,1
+1,0,0,0,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,1,0,0,1,1
+1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,1,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,0,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,1,0
+1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,0,1,0
+1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,1,1
+1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,1,1
+1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,1
+1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,1
+1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0
+1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0
+1,0,0,0,1,1,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0,1,0
+1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0
+1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
\ No newline at end of file
diff --git a/pyspedas/mms/feeps/sun/MMS2_FEEPS_ContaminatedSectors_20220815.csv b/pyspedas/mms/feeps/sun/MMS2_FEEPS_ContaminatedSectors_20220815.csv
new file mode 100644
index 00000000..4fb438bd
--- /dev/null
+++ b/pyspedas/mms/feeps/sun/MMS2_FEEPS_ContaminatedSectors_20220815.csv
@@ -0,0 +1,64 @@
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,1,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1
+0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1
+1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1
+1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1
+1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,1,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,1,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,1,0,0,0,1,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,1,0,1,1,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,1,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,1,1
+0,0,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,1,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,1,1,0,0,0,1,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1
+0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1
+0,0,0,0,0,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
+0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
\ No newline at end of file
diff --git a/pyspedas/mms/feeps/sun/MMS3_FEEPS_ContaminatedSectors_20220815.csv b/pyspedas/mms/feeps/sun/MMS3_FEEPS_ContaminatedSectors_20220815.csv
new file mode 100644
index 00000000..347b4919
--- /dev/null
+++ b/pyspedas/mms/feeps/sun/MMS3_FEEPS_ContaminatedSectors_20220815.csv
@@ -0,0 +1,64 @@
+1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,0,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,0,1,0
+1,1,1,1,1,0,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0
+1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,1,0
+1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0
+1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,0,0,1,0
+1,1,0,0,1,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,0,0,1,0
+1,1,0,0,1,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,0,0,1,0
+1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,1,1,0,0,1,0
+1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,0,1,0
+1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,0,1,1
+1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,0,1,1
+1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,0,1,1
+1,1,0,0,1,1,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,1,1
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,1,0,1,1,1
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,1,1,1
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,1,0,1,1,0
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,1,0,1,0,1,1,0
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1,1
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1,1
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1,1
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,1,1
+1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,1,1
+1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,1
+1,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,1,1,1
+1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0
+1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
+1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0
\ No newline at end of file
diff --git a/pyspedas/mms/feeps/sun/MMS4_FEEPS_ContaminatedSectors_20220815.csv b/pyspedas/mms/feeps/sun/MMS4_FEEPS_ContaminatedSectors_20220815.csv
new file mode 100644
index 00000000..909db8c8
--- /dev/null
+++ b/pyspedas/mms/feeps/sun/MMS4_FEEPS_ContaminatedSectors_20220815.csv
@@ -0,0 +1,64 @@
+0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1
+0,1,0,0,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1
+0,0,0,0,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1
+0,0,0,1,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1
+0,0,0,1,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1
+1,0,0,1,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1
+1,0,0,1,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1
+1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1
+1,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0
+1,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,0,0,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,0,0,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0
+1,1,0,1,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,1,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,1,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0
+1,1,1,1,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1
+1,1,1,1,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,1,1,1
+1,1,1,1,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1
+1,1,1,0,0,0,1,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1
+1,1,0,0,0,0,1,0,0,0,0,0,1,1,1,1,0,0,1,0,0,1,1,1
+1,1,0,0,0,0,1,0,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1
+1,1,0,0,0,0,1,0,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1
+1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,1
+1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,1
+0,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,1
+0,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,0
+0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,1,1,1,1,1,1,1,1,0
+0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0
+0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,1,1,1,0,1,1,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,1
+0,0,0,0,1,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,1
+0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,0,0,1,1,1,1,1
+0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,0,0,1,1,1,1,1
+0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,1,1,1,1
+0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,0,0,1,1,1,1,1
+0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1
+0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,1,1,0,1,0,1,0,1,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,1,1,0,1,0,1,0,0,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,0,1,1,1,0,1,1,1,0,0,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,1,1,1
+0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,1,1,1
+0,0,0,0,1,0,1,0,0,0,1,1,1,1,0,1,1,0,0,0,0,1,1,1
+0,0,0,0,1,0,1,0,0,0,1,1,1,1,0,1,1,0,0,0,0,1,1,1
+0,0,0,0,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,0,0,1,1,1
+0,0,0,0,1,0,1,1,0,0,1,1,1,1,0,0,0,1,0,0,0,1,1,1
+0,1,0,0,1,1,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,1,1
+0,1,0,0,1,1,1,0,1,0,1,1,1,1,0,0,0,0,0,0,0,1,1,1
+0,1,0,1,1,1,1,0,1,1,1,0,1,1,0,0,0,0,0,0,0,0,1,1
+0,1,0,1,1,1,1,0,1,1,1,0,1,1,0,0,0,0,0,0,0,0,1,1
+0,1,0,0,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1
\ No newline at end of file
diff --git a/pyspedas/mms/fgm/fgm.py b/pyspedas/mms/fgm/fgm.py
index 6c434dd3..4f8fddf6 100644
--- a/pyspedas/mms/fgm/fgm.py
+++ b/pyspedas/mms/fgm/fgm.py
@@ -1,15 +1,13 @@
-
+import re
from pyspedas.mms.mms_load_data import mms_load_data
from pyspedas.mms.fgm.mms_fgm_remove_flags import mms_fgm_remove_flags
from pyspedas.mms.fgm.mms_fgm_set_metadata import mms_fgm_set_metadata
from pyspedas.mms.fgm.mms_split_fgm_data import mms_split_fgm_data
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
from pytplot import del_data
-import re
@print_vars
def mms_load_fgm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy',
@@ -19,12 +17,12 @@ def mms_load_fgm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
min_version=None, cdf_version=None, spdf=False, always_prompt=False, no_split_vars=False,
get_fgm_ephemeris=False):
"""
- This function loads FGM data into tplot variables
+ Load MMS magnetometer data
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -64,11 +62,11 @@ def mms_load_fgm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -92,7 +90,7 @@ def mms_load_fgm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
@@ -100,11 +98,11 @@ def mms_load_fgm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
get_fgm_ephemeris: bool
Keep the ephemeris variables in the FGM files
- Returns:
+ Returns
+ ----------
List of tplot variables created.
"""
-
if (varformat is not None) and (not keep_flagged) and (not available) and (not notplot):
varformat_fetch = varformat+'|*_flag_*'
else:
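A minimal usage sketch for this loader (the trange, probe, and variable name below are arbitrary examples following the mms<probe>_fgm_b_gse_<rate>_<level> pattern):

import pyspedas
from pytplot import tplot

fgm_vars = pyspedas.mms.fgm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy')
tplot('mms1_fgm_b_gse_srvy_l2')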
diff --git a/pyspedas/mms/fgm/mms_curl.py b/pyspedas/mms/fgm/mms_curl.py
index 0ef9bed1..94456dab 100644
--- a/pyspedas/mms/fgm/mms_curl.py
+++ b/pyspedas/mms/fgm/mms_curl.py
@@ -1,9 +1,10 @@
-
+import logging
import math
import numpy as np
from pytplot import get_data, store_data, options
from pyspedas import tinterpol
+
def mms_curl(fields=None, positions=None, suffix=''):
"""
This function applies the curlometer technique to MMS FGM data
@@ -40,11 +41,11 @@ def mms_curl(fields=None, positions=None, suffix=''):
"""
if fields is None or positions is None:
- print('Error: B-field and spacecraft position keywords required.')
+ logging.error('Error: B-field and spacecraft position keywords required.')
return
if len(fields) != 4 or len(positions) != 4:
- print('Error, fields and positions keywords should be specified as 4-element arrays containing the tplot variable name for the field and position variables')
+ logging.error('Error, fields and positions keywords should be specified as 4-element arrays containing the tplot variable name for the field and position variables')
return
# *********************************************************
@@ -71,16 +72,16 @@ def mms_curl(fields=None, positions=None, suffix=''):
mms4_bfield = get_data(fields[3] + '_i')
if mms1_bfield is None:
- print('Error, B-field variable is missing: ' + fields[0])
+ logging.error('Error, B-field variable is missing: ' + fields[0])
return
elif mms2_bfield is None:
- print('Error, B-field variable is missing: ' + fields[1] + '_i')
+ logging.error('Error, B-field variable is missing: ' + fields[1] + '_i')
return
elif mms3_bfield is None:
- print('Error, B-field variable is missing: ' + fields[2] + '_i')
+ logging.error('Error, B-field variable is missing: ' + fields[2] + '_i')
return
elif mms4_bfield is None:
- print('Error, B-field variable is missing: ' + fields[3] + '_i')
+ logging.error('Error, B-field variable is missing: ' + fields[3] + '_i')
return
timesb1, datab1 = mms1_bfield
@@ -100,16 +101,16 @@ def mms_curl(fields=None, positions=None, suffix=''):
mms4_pos = get_data(positions[3] + '_i')
if mms1_pos is None:
- print('Error, S/C position variable is missing: ' + positions[0] + '_i')
+ logging.error('Error, S/C position variable is missing: ' + positions[0] + '_i')
return
elif mms2_pos is None:
- print('Error, S/C position variable is missing: ' + positions[1] + '_i')
+ logging.error('Error, S/C position variable is missing: ' + positions[1] + '_i')
return
elif mms3_pos is None:
- print('Error, S/C position variable is missing: ' + positions[2] + '_i')
+ logging.error('Error, S/C position variable is missing: ' + positions[2] + '_i')
return
elif mms4_pos is None:
- print('Error, S/C position variable is missing: ' + positions[3] + '_i')
+ logging.error('Error, S/C position variable is missing: ' + positions[3] + '_i')
return
timesp1, p1 = mms1_pos
@@ -218,17 +219,14 @@ def mms_curl(fields=None, positions=None, suffix=''):
options('divB' + suffix, 'ysubtitle', '[nT/km]')
options('curlB' + suffix, 'ytitle', 'curl(B)')
options('curlB' + suffix, 'ysubtitle', '[nT/km]')
- options('curlB' + suffix, 'Color', ['b', 'g', 'r'])
options('curlB' + suffix, 'legend_names', ['delBx', 'delBy', 'delBz'])
options('jtotal' + suffix, 'ytitle', 'J')
options('jtotal' + suffix, 'ysubtitle', '[A/m^2]')
- options('jtotal' + suffix, 'Color', ['b', 'g', 'r'])
options('jtotal' + suffix, 'legend_names', ['Jx', 'Jy', 'Jz'])
options('jperp' + suffix, 'ytitle', 'Jperp')
options('jperp' + suffix, 'ysubtitle', '[A/m^2]')
- options('jperp' + suffix, 'Color', ['b', 'g', 'r'])
options('jperp' + suffix, 'legend_names', ['Jperpx', 'Jperpy', 'Jperpz'])
options('jpar' + suffix, 'ytitle', 'Jparallel')
options('jpar' + suffix, 'ysubtitle', '[A/m^2]')
- return ['baryb', 'curlB', 'divB', 'jtotal', 'jpar', 'jperp', 'alpha', 'alphaparallel']
\ No newline at end of file
+ return ['baryb', 'curlB', 'divB', 'jtotal', 'jpar', 'jperp', 'alpha', 'alphaparallel']
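A usage sketch for the curlometer, assuming burst-mode FGM data (with ephemeris) has been loaded for all four probes first:

import pyspedas
from pyspedas.mms import mms_curl

trange = ['2015-10-30/05:15:45', '2015-10-30/05:15:48']
pyspedas.mms.fgm(trange=trange, probe=[1, 2, 3, 4], data_rate='brst', time_clip=True,
                 varformat='*_gse_*', get_fgm_ephemeris=True)

fields = ['mms' + str(p) + '_fgm_b_gse_brst_l2' for p in range(1, 5)]
positions = ['mms' + str(p) + '_fgm_r_gse_brst_l2' for p in range(1, 5)]
curl_vars = mms_curl(fields=fields, positions=positions)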
diff --git a/pyspedas/mms/fgm/mms_fgm_remove_flags.py b/pyspedas/mms/fgm/mms_fgm_remove_flags.py
index e464e66c..23d55af9 100644
--- a/pyspedas/mms/fgm/mms_fgm_remove_flags.py
+++ b/pyspedas/mms/fgm/mms_fgm_remove_flags.py
@@ -1,25 +1,27 @@
import numpy as np
from pytplot import get_data, store_data
from pyspedas import tnames
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
+
def mms_fgm_remove_flags(probe, data_rate, level, instrument, suffix=''):
"""
This function removes data flagged by the FGM 'flag' variable (flags > 0),
in order to only show science quality data by default.
- Parameters:
- probe : str or list of str
+ Parameters
+ -----------
+ probe: str or list of str
probe or list of probes, valid values for MMS probes are ['1','2','3','4'].
- data_rate : str or list of str
+ data_rate: str or list of str
instrument data rates for FGM include 'brst' 'fast' 'slow' 'srvy'. The
default is 'srvy'.
- level : str
+ level: str
indicates level of data processing. the default if no level is specified is 'l2'
- instrument : str
+ instrument: str
instrument; probably 'fgm'
suffix: str
@@ -44,7 +46,7 @@ def mms_fgm_remove_flags(probe, data_rate, level, instrument, suffix=''):
if not data_exists(flag_var):
continue
- flagged = get_data(flag_var)
+ flagged = get_data(flag_var, dt=True)
if flagged is None:
continue
@@ -55,7 +57,7 @@ def mms_fgm_remove_flags(probe, data_rate, level, instrument, suffix=''):
for var_specifier in ['_b_gse_', '_b_gsm_', '_b_dmpa_', '_b_bcs_']:
var_name = 'mms'+str(this_probe)+'_'+instrument+var_specifier+this_dr+'_'+this_lvl+suffix
if var_name in tplot_vars:
- times, var_data = get_data(var_name)
+ times, var_data = get_data(var_name, dt=True)
metadata = get_data(var_name, metadata=True)
var_data[flagged_data] = np.nan
store_data(var_name, data={'x': times, 'y': var_data}, attr_dict=metadata)
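The masking itself is simple NaN-blanking at the flagged sample indices; a sketch with hypothetical values:

import numpy as np

flags = np.array([0.0, 0.0, 2.0, 0.0, 1.0, 0.0])  # flags > 0 mark non-science-quality samples
var_data = np.random.rand(6, 4)                   # Bx, By, Bz, Btot per sample

flagged_data = np.where(flags > 0.0)[0]
var_data[flagged_data] = np.nan                   # blank flagged samples before re-storing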
diff --git a/pyspedas/mms/fgm/mms_fgm_set_metadata.py b/pyspedas/mms/fgm/mms_fgm_set_metadata.py
index 523b22d2..6c77dc1a 100644
--- a/pyspedas/mms/fgm/mms_fgm_set_metadata.py
+++ b/pyspedas/mms/fgm/mms_fgm_set_metadata.py
@@ -1,6 +1,7 @@
from pytplot import options
from pyspedas import tnames
+
def mms_fgm_set_metadata(probe, data_rate, level, instrument, suffix=''):
"""
This function updates the metadata for FGM data products
@@ -36,34 +37,26 @@ def mms_fgm_set_metadata(probe, data_rate, level, instrument, suffix=''):
for this_lvl in level:
if 'mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r', '#000000'])
options('mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Bx GSE', 'By GSE', 'Bz GSE', 'B total'])
if 'mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r', '#000000'])
options('mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Bx GSM', 'By GSM', 'Bz GSM', 'B total'])
if 'mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r', '#000000'])
options('mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Bx DMPA', 'By DMPA', 'Bz DMPA', 'B total'])
if 'mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+suffix, 'color', ['b', 'g', 'r', '#000000'])
options('mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+suffix, 'legend_names', ['Bx BCS', 'By BCS', 'Bz BCS', 'B total'])
if 'mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+'_bvec'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_b_gse_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'legend_names', ['Bx GSE', 'By GSE', 'Bz GSE'])
if 'mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+'_bvec'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_b_gsm_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'legend_names', ['Bx GSM', 'By GSM', 'Bz GSM'])
if 'mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+'_bvec'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_b_dmpa_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'legend_names', ['Bx DMPA', 'By DMPA', 'Bz DMPA'])
if 'mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+'_bvec'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'ytitle', 'MMS'+str(this_probe)+' FGM')
- options('mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_b_bcs_'+this_dr+'_'+this_lvl+'_bvec'+suffix, 'legend_names', ['Bx BCS', 'By BCS', 'Bz BCS'])
diff --git a/pyspedas/mms/fgm/mms_lingradest.py b/pyspedas/mms/fgm/mms_lingradest.py
index 8a2b8301..0ffbf255 100644
--- a/pyspedas/mms/fgm/mms_lingradest.py
+++ b/pyspedas/mms/fgm/mms_lingradest.py
@@ -1,15 +1,34 @@
+import logging
import numpy as np
from pyspedas import tinterpol
from pyspedas.analysis.lingradest import lingradest
from pytplot import get_data, store_data, options, join_vec
+
def mms_lingradest(fields=None, positions=None, suffix=''):
"""
-
+    Calculations of the gradient, curl, curvature, etc., for MMS using the
+    Linear Gradient/Curl Estimator technique (see Chanteur, ISSI, 1998, Ch. 11).
+
+ Parameters
+ ----------
+ fields : list of str
+ Names of the magnetic field data variables, ordered by spacecraft
+ (e.g., ['mms1_b_gse', 'mms2_b_gse', 'mms3_b_gse', 'mms4_b_gse']).
+ positions : list of str
+ Names of the spacecraft position data variables, ordered by spacecraft
+ (e.g., ['mms1_pos_gse', 'mms2_pos_gse', 'mms3_pos_gse', 'mms4_pos_gse']).
+ suffix : str, optional
+ Suffix to add to the names of the output variables.
+
+ Returns
+ -------
+ None
+ The function stores the computed parameters as PyTplot variables
"""
-
if fields is None or positions is None:
- print('B-field and spacecraft position keywords required.')
+ logging.error('B-field and spacecraft position keywords required.')
return
# interpolate the magnetic field data all onto the same timeline (MMS1):
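A usage sketch, assuming B-field and position variables for all four probes are already loaded (the position variable names below are hypothetical placeholders):

from pyspedas.mms.fgm.mms_lingradest import mms_lingradest

fields = ['mms' + str(p) + '_fgm_b_gse_srvy_l2_bvec' for p in range(1, 5)]
positions = ['mms' + str(p) + '_mec_r_gse' for p in range(1, 5)]
mms_lingradest(fields=fields, positions=positions, suffix='_lgd')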
diff --git a/pyspedas/mms/fgm/mms_split_fgm_data.py b/pyspedas/mms/fgm/mms_split_fgm_data.py
index 9a5bdbe1..4cf17453 100644
--- a/pyspedas/mms/fgm/mms_split_fgm_data.py
+++ b/pyspedas/mms/fgm/mms_split_fgm_data.py
@@ -1,15 +1,16 @@
import logging
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
from pytplot import get_data, store_data, options
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_split_fgm_data(probe, data_rate, level, instrument, suffix=''):
"""
-
-
+    Helper routine for splitting 4-vector FGM data (Bx, By, Bz, b_total)
+    into two tplot variables: one for the vector (Bx, By, Bz) and one for the total field
"""
probe = probe.lower()
@@ -35,7 +36,7 @@ def mms_split_fgm_data(probe, data_rate, level, instrument, suffix=''):
if not data_exists(tplot_name):
continue
- fgm_data = get_data(tplot_name)
+ fgm_data = get_data(tplot_name, dt=True)
if fgm_data is None:
continue
@@ -54,4 +55,4 @@ def mms_split_fgm_data(probe, data_rate, level, instrument, suffix=''):
out_vars.append(tplot_name + '_bvec' + suffix)
out_vars.append(tplot_name + '_btot' + suffix)
- return out_vars
\ No newline at end of file
+ return out_vars
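The split described in the docstring is direct column slicing; a toy version:

import numpy as np

fgm_y = np.random.rand(5, 4)  # Bx, By, Bz, Btot per time sample
bvec = fgm_y[:, 0:3]          # stored as <tplot_name>_bvec
btot = fgm_y[:, 3]            # stored as <tplot_name>_btot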
diff --git a/pyspedas/mms/fpi/fpi.py b/pyspedas/mms/fpi/fpi.py
index 7be7f516..05f7903b 100644
--- a/pyspedas/mms/fpi/fpi.py
+++ b/pyspedas/mms/fpi/fpi.py
@@ -7,6 +7,7 @@
from pyspedas.mms.mms_config import CONFIG
from pytplot import tplot_rename, del_data
+
@print_vars
def mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast',
level='l2', datatype='*', varformat=None, varnames=[], suffix='',
@@ -14,12 +15,12 @@ def mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast
available=False, notplot=False, latest_version=False, major_version=False,
min_version=None, cdf_version=None, spdf=False, always_prompt=False):
"""
- This function loads FPI data into tplot variables
+ Load data from the Fast Plasma Investigation (FPI)
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -66,11 +67,11 @@ def mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -90,16 +91,16 @@ def mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ -----------
List of tplot variables created.
"""
-
# different datatypes for burst mode files
if data_rate.lower() == 'brst':
if isinstance(datatype, str):
@@ -159,8 +160,10 @@ def mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast
notplot=notplot, latest_version=latest_version, major_version=major_version,
min_version=min_version,
cdf_version=cdf_version, spdf=spdf, always_prompt=always_prompt)
- tvars.extend(tplotnames_errflags_emom)
- tvars.extend(tplotnames_errflags_edist)
+ if tplotnames_errflags_emom is not None:
+ tvars.extend(tplotnames_errflags_emom)
+ if tplotnames_errflags_edist is not None:
+ tvars.extend(tplotnames_errflags_edist)
else:
# user didn't request both dist and moments, so no variables should have been clobbered
# but we still need to append _dist, _moms to the end of the names
@@ -200,8 +203,10 @@ def mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast
notplot=notplot, latest_version=latest_version, major_version=major_version,
min_version=min_version,
cdf_version=cdf_version, spdf=spdf, always_prompt=always_prompt)
- tvars.extend(tplotnames_errflags_imom)
- tvars.extend(tplotnames_errflags_idist)
+ if tplotnames_errflags_imom is not None:
+ tvars.extend(tplotnames_errflags_imom)
+ if tplotnames_errflags_idist is not None:
+ tvars.extend(tplotnames_errflags_idist)
else:
# user didn't request both dist and moments, so no variables should have been clobbered
# but we still need to append _dist, _moms to the end of the names
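A minimal load/plot sketch for FPI (the values are arbitrary examples):

import pyspedas
from pytplot import tplot

fpi_vars = pyspedas.mms.fpi(trange=['2015-10-16', '2015-10-17'], probe='1',
                            datatype='des-moms', data_rate='fast')
tplot('mms1_des_energyspectr_omni_fast')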
diff --git a/pyspedas/mms/fpi/mms_fpi_ang_ang.py b/pyspedas/mms/fpi/mms_fpi_ang_ang.py
index 29dfce51..98bb0838 100644
--- a/pyspedas/mms/fpi/mms_fpi_ang_ang.py
+++ b/pyspedas/mms/fpi/mms_fpi_ang_ang.py
@@ -7,19 +7,29 @@
import pyspedas
from pyspedas import time_double
from pyspedas.mms.fpi.mms_get_fpi_dist import mms_get_fpi_dist
+from pyspedas.mms.fpi.mms_pad_fpi import mms_pad_fpi
def mms_fpi_ang_ang(time,
species='i',
probe='1',
data_rate='fast',
+ fgm_data_rate='srvy',
level='l2',
+ fgm_level='l2',
center_measurement=False,
energy_range=[10, 30000],
xsize=8,
ysize=8,
cmap='spedas',
zrange=[None, None],
+ nocontours=False,
+ save_png=None,
+ save_jpeg=None,
+ save_eps=None,
+ save_svg=None,
+ dpi=300,
display=True):
"""
Creates various plots directly from the FPI distribution functions, including:
@@ -77,6 +87,7 @@ def mms_fpi_ang_ang(time,
level=level,
center_measurement=center_measurement)
+ fgm_vars = pyspedas.mms.fgm(trange=trange, probe=probe, data_rate=fgm_data_rate, level=fgm_level)
dist = get_data('mms'+probe+'_d'+species+'s_dist_'+data_rate)
closest_idx = np.searchsorted(dist.times, time_double(time), side='left')
@@ -109,6 +120,10 @@ def mms_fpi_ang_ang(time,
spec_options = {}
+ pa_dist = mms_pad_fpi(dists,
+ time=dist.times[closest_idx],
+ mag_data='mms'+probe+'_fgm_b_gse_'+fgm_data_rate+'_'+fgm_level+'_bvec')
+
# the first figure: azimuth vs. zenith
fig, axes = plt.subplots()
fig.set_size_inches(xsize, ysize)
@@ -139,6 +154,24 @@ def mms_fpi_ang_ang(time,
colorbar = fig.colorbar(im, cax=cax)
colorbar.set_label('f ($s^3$/$cm^6$)')
+ if not nocontours:
+ num_levels = 16
+ contour_levels = np.array(180*np.arange(num_levels+1)/num_levels, dtype=int)
+ contours = axes.contour(pa_dist['wpol'], pa_dist['waz'], pa_dist['pa_azpol'], contour_levels, linewidths=0.5)
+ axes.clabel(contours, contours.levels, inline=True, fontsize=10)
+
+ if save_png is not None and save_png != '':
+ plt.savefig(save_png + '_azimuth_vs_zenith.png', dpi=dpi)
+
+ if save_jpeg is not None and save_jpeg != '':
+ plt.savefig(save_jpeg + '_azimuth_vs_zenith.jpeg', dpi=dpi)
+
+ if save_eps is not None and save_eps != '':
+ plt.savefig(save_eps + '_azimuth_vs_zenith.eps', dpi=dpi)
+
+ if save_svg is not None and save_svg != '':
+ plt.savefig(save_svg + '_azimuth_vs_zenith.svg', dpi=dpi)
+
# Zenith vs. energy
fig2, axes2 = plt.subplots()
fig2.set_size_inches(xsize, ysize)
@@ -163,6 +196,18 @@ def mms_fpi_ang_ang(time,
colorbar2 = fig2.colorbar(im2, cax=cax2)
colorbar2.set_label('f ($s^3$/$cm^6$)')
+ if save_png is not None and save_png != '':
+ plt.savefig(save_png + '_zenith_vs_energy.png', dpi=dpi)
+
+ if save_jpeg is not None and save_jpeg != '':
+ plt.savefig(save_jpeg + '_zenith_vs_energy.jpeg', dpi=dpi)
+
+ if save_eps is not None and save_eps != '':
+ plt.savefig(save_eps + '_zenith_vs_energy.eps', dpi=dpi)
+
+ if save_svg is not None and save_svg != '':
+ plt.savefig(save_svg + '_zenith_vs_energy.svg', dpi=dpi)
+
# Azimuth vs. energy
fig3, axes3 = plt.subplots()
fig3.set_size_inches(xsize, ysize)
@@ -185,5 +230,46 @@ def mms_fpi_ang_ang(time,
colorbar3 = fig3.colorbar(im3, cax=cax3)
colorbar3.set_label('f ($s^3$/$cm^6$)')
+ if save_png is not None and save_png != '':
+ plt.savefig(save_png + '_azimuth_vs_energy.png', dpi=dpi)
+
+ if save_jpeg is not None and save_jpeg != '':
+ plt.savefig(save_jpeg + '_azimuth_vs_energy.jpeg', dpi=dpi)
+
+ if save_eps is not None and save_eps != '':
+ plt.savefig(save_eps + '_azimuth_vs_energy.eps', dpi=dpi)
+
+ if save_svg is not None and save_svg != '':
+ plt.savefig(save_svg + '_azimuth_vs_energy.svg', dpi=dpi)
+
+ # PA vs. energy
+ fig4, axes4 = plt.subplots()
+ fig4.set_size_inches(xsize, ysize)
+
+ fig4.subplots_adjust(left=0.14, right=0.85)
+ axes4.set_yscale('log')
+ axes4.set_xlabel('Pitch angle (deg)')
+ axes4.set_ylabel('Energy (eV)')
+
+ spec_options['norm'] = mpl.colors.LogNorm(vmin=zrange[0], vmax=zrange[1])
+
+ im4 = axes4.pcolormesh(pa_dist['pa'], pa_dist['egy'], pa_dist['data'], **spec_options)
+
+ cax4 = fig4.add_axes([box.xmax + pad, box.ymin, width, box.height])
+ colorbar4 = fig4.colorbar(im4, cax=cax4)
+ colorbar4.set_label('f ($s^3$/$cm^6$)')
+
+ if save_png is not None and save_png != '':
+ plt.savefig(save_png + '_pa_vs_energy.png', dpi=dpi)
+
+ if save_jpeg is not None and save_jpeg != '':
+ plt.savefig(save_jpeg + '_pa_vs_energy.jpeg', dpi=dpi)
+
+ if save_eps is not None and save_eps != '':
+ plt.savefig(save_eps + '_pa_vs_energy.eps', dpi=dpi)
+
+ if save_svg is not None and save_svg != '':
+ plt.savefig(save_svg + '_pa_vs_energy.svg', dpi=dpi)
+
if display:
plt.show()
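A usage sketch exercising the new figure-export keywords (the time and output prefix are hypothetical):

from pyspedas.mms.fpi.mms_fpi_ang_ang import mms_fpi_ang_ang

mms_fpi_ang_ang('2015-10-16/13:06:30', species='i', probe='1', data_rate='brst',
                save_png='mms1_dis_angang', dpi=150, display=False)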
diff --git a/pyspedas/mms/fpi/mms_fpi_make_compressionlossbars.py b/pyspedas/mms/fpi/mms_fpi_make_compressionlossbars.py
index 91e8da6d..836787c7 100644
--- a/pyspedas/mms/fpi/mms_fpi_make_compressionlossbars.py
+++ b/pyspedas/mms/fpi/mms_fpi_make_compressionlossbars.py
@@ -1,7 +1,9 @@
+import logging
import numpy as np
from fnmatch import fnmatch
from pytplot import get_data, store_data, options
-from pyspedas import time_double
+from pyspedas import time_datetime
+
def mms_fpi_make_compressionlossbars(tname, lossy=False):
"""
@@ -33,13 +35,12 @@ def mms_fpi_make_compressionlossbars(tname, lossy=False):
List of the tplot variables created.
"""
-
if fnmatch(tname, 'mms?_dis*'):
instrument = 'DIS'
elif fnmatch(tname, 'mms?_des*'):
instrument = 'DES'
else:
- print('Unable to determine instrument from variable name.')
+ logging.error('Unable to determine instrument from variable name.')
return
if instrument == 'DES':
@@ -48,19 +49,19 @@ def mms_fpi_make_compressionlossbars(tname, lossy=False):
colors = 'blue'
if fnmatch(tname, '*_fast*'):
- print('All fast survey data are lossy compressed, so there is no need to create this bar.')
+ logging.info('All fast survey data are lossy compressed, so there is no need to create this bar.')
return
elif fnmatch(tname, '*_brst*'):
data_rate = 'Brst'
else:
- print('Unable to determine data rate from variable name.')
+ logging.error('Unable to determine data rate from variable name.')
return
- data = get_data(tname)
+ data = get_data(tname, dt=True)
metadata = get_data(tname, metadata=True)
if data is None:
- print('Problem reading the variable: ' + tname)
+ logging.error('Problem reading the variable: ' + tname)
return
flagline = np.zeros(len(data.times))
@@ -70,7 +71,7 @@ def mms_fpi_make_compressionlossbars(tname, lossy=False):
version = file_id.split('_v')[1].split('.')
if version[0] == '2':
if version[1] == '1':
- if data.times[0] < time_double('2016-04-01'):
+ if data.times[0] < time_datetime('2016-04-01'):
lossy = 3
else:
lossy = 1
@@ -99,4 +100,4 @@ def mms_fpi_make_compressionlossbars(tname, lossy=False):
options(tname + '_flagbars', 'thick', 4)
options(tname + '_flagbars', 'border', False)
- return [tname + '_flagbars']
\ No newline at end of file
+ return [tname + '_flagbars']
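The instrument and data-rate detection above is fnmatch globbing on the variable name; for example:

from fnmatch import fnmatch

tname = 'mms1_des_compressionloss_brst'  # hypothetical variable name
assert fnmatch(tname, 'mms?_des*')  # '?' matches the single probe digit -> DES
assert fnmatch(tname, '*_brst*')    # burst-mode variable -> data_rate = 'Brst'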
diff --git a/pyspedas/mms/fpi/mms_fpi_make_errorflagbars.py b/pyspedas/mms/fpi/mms_fpi_make_errorflagbars.py
index 530b2736..ce3a5643 100644
--- a/pyspedas/mms/fpi/mms_fpi_make_errorflagbars.py
+++ b/pyspedas/mms/fpi/mms_fpi_make_errorflagbars.py
@@ -1,6 +1,7 @@
import numpy as np
from pytplot import get_data, store_data, options
+
def mms_fpi_make_errorflagbars(tname, level='l2'):
"""
This procedure creates FPI error flag bars for plotting
@@ -76,11 +77,10 @@ def mms_fpi_make_errorflagbars(tname, level='l2'):
-----------
List containing the names of the created tplot variables
"""
-
instrument = tname.split('_')[1].upper()
data_rate = tname.split('_')[3].capitalize()
- data = get_data(tname)
+ data = get_data(tname, dt=True)
metadata = get_data(tname, metadata=True)
if metadata is None:
@@ -225,7 +225,11 @@ def mms_fpi_make_errorflagbars(tname, level='l2'):
flagline = np.zeros((len(data.times), 2))
for i in [0, 1]:
for j in range(len(flags)):
- if int(flags[13-i:13-i+1][0]) == 0:
+ try:
+ flagset = int(flags[13-i:13-i+1][0])
+ except IndexError:
+ continue
+ if flagset == 0:
flagline[j, i] = np.nan
else:
flagline[j, i] = 1
@@ -243,4 +247,4 @@ def mms_fpi_make_errorflagbars(tname, level='l2'):
options(tname + '_flagbars_dist', 'markers', 's')
out_vars = [tname + '_flagbars_dist']
- return out_vars
\ No newline at end of file
+ return out_vars
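The new try/except guards against flag strings shorter than the 14 characters the bit indexing expects; a sketch of the failure mode it absorbs:

flags = '0101'  # hypothetical flag string, shorter than expected
for i in [0, 1]:
    try:
        flagset = int(flags[13 - i:13 - i + 1][0])  # slicing past the end gives '', and ''[0] raises IndexError
    except IndexError:
        continue  # skip bits that aren't present instead of crashing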
diff --git a/pyspedas/mms/fpi/mms_fpi_set_metadata.py b/pyspedas/mms/fpi/mms_fpi_set_metadata.py
index f860251c..a7243f03 100644
--- a/pyspedas/mms/fpi/mms_fpi_set_metadata.py
+++ b/pyspedas/mms/fpi/mms_fpi_set_metadata.py
@@ -1,6 +1,7 @@
from pytplot import options
from pyspedas import tnames
+
def mms_fpi_set_metadata(probe, data_rate, datatype, level, suffix=''):
"""
This function updates the metadata for FPI data products
@@ -39,7 +40,6 @@ def mms_fpi_set_metadata(probe, data_rate, datatype, level, suffix=''):
options('mms'+this_probe+'_des_energyspectr_par_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES')
options('mms'+this_probe+'_des_energyspectr_par_'+this_dr+suffix, 'ylog', True)
options('mms'+this_probe+'_des_energyspectr_par_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_des_energyspectr_par_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_des_energyspectr_par_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
options('mms'+this_probe+'_des_energyspectr_par_'+this_dr+suffix, 'spec', True)
@@ -47,14 +47,12 @@ def mms_fpi_set_metadata(probe, data_rate, datatype, level, suffix=''):
options('mms'+this_probe+'_des_energyspectr_anti_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES')
options('mms'+this_probe+'_des_energyspectr_anti_'+this_dr+suffix, 'ylog', True)
options('mms'+this_probe+'_des_energyspectr_anti_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_des_energyspectr_anti_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_des_energyspectr_anti_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
if 'mms'+this_probe+'_des_energyspectr_perp_'+this_dr+suffix in tvars:
options('mms'+this_probe+'_des_energyspectr_perp_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES')
options('mms'+this_probe+'_des_energyspectr_perp_'+this_dr+suffix, 'ylog', True)
options('mms'+this_probe+'_des_energyspectr_perp_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_des_energyspectr_perp_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_des_energyspectr_perp_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
options('mms'+this_probe+'_des_energyspectr_anti_'+this_dr+suffix, 'spec', True)
@@ -62,38 +60,32 @@ def mms_fpi_set_metadata(probe, data_rate, datatype, level, suffix=''):
options('mms'+this_probe+'_des_energyspectr_omni_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES')
options('mms'+this_probe+'_des_energyspectr_omni_'+this_dr+suffix, 'ylog', True)
options('mms'+this_probe+'_des_energyspectr_omni_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_des_energyspectr_omni_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_des_energyspectr_omni_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
options('mms'+this_probe+'_des_energyspectr_omni_'+this_dr+suffix, 'spec', True)
if 'mms'+this_probe+'_des_pitchangdist_lowen_'+this_dr+suffix in tvars:
options('mms'+this_probe+'_des_pitchangdist_lowen_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_des_pitchangdist_lowen_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_des_pitchangdist_lowen_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES')
options('mms'+this_probe+'_des_pitchangdist_lowen_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
options('mms'+this_probe+'_des_pitchangdist_lowen_'+this_dr+suffix, 'spec', True)
if 'mms'+this_probe+'_des_pitchangdist_miden_'+this_dr+suffix in tvars:
options('mms'+this_probe+'_des_pitchangdist_miden_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_des_pitchangdist_miden_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_des_pitchangdist_miden_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES')
options('mms'+this_probe+'_des_pitchangdist_miden_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
options('mms'+this_probe+'_des_pitchangdist_miden_'+this_dr+suffix, 'spec', True)
if 'mms'+this_probe+'_des_pitchangdist_highen_'+this_dr+suffix in tvars:
options('mms'+this_probe+'_des_pitchangdist_highen_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_des_pitchangdist_highen_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_des_pitchangdist_highen_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES')
options('mms'+this_probe+'_des_pitchangdist_highen_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
options('mms'+this_probe+'_des_pitchangdist_highen_'+this_dr+suffix, 'spec', True)
if 'mms'+this_probe+'_des_bulkv_dbcs_'+this_dr+suffix in tvars:
- options('mms'+this_probe+'_des_bulkv_dbcs_'+this_dr+suffix, 'color', ['b', 'g', 'r'])
options('mms'+this_probe+'_des_bulkv_dbcs_'+this_dr+suffix, 'legend_names', ['Vx DBCS', 'Vy DBCS', 'Vz DBCS'])
options('mms'+this_probe+'_des_bulkv_dbcs_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES velocity')
if 'mms'+this_probe+'_des_bulkv_gse_'+this_dr+suffix in tvars:
- options('mms'+this_probe+'_des_bulkv_gse_'+this_dr+suffix, 'color', ['b', 'g', 'r'])
options('mms'+this_probe+'_des_bulkv_gse_'+this_dr+suffix, 'legend_names', ['Vx GSE', 'Vy GSE', 'Vz GSE'])
options('mms'+this_probe+'_des_bulkv_gse_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DES velocity')
@@ -105,17 +97,14 @@ def mms_fpi_set_metadata(probe, data_rate, datatype, level, suffix=''):
options('mms'+this_probe+'_dis_energyspectr_omni_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DIS')
options('mms'+this_probe+'_dis_energyspectr_omni_'+this_dr+suffix, 'ylog', True)
options('mms'+this_probe+'_dis_energyspectr_omni_'+this_dr+suffix, 'zlog', True)
- options('mms'+this_probe+'_dis_energyspectr_omni_'+this_dr+suffix, 'Colormap', 'spedas')
options('mms'+this_probe+'_dis_energyspectr_omni_'+this_dr+suffix, 'ztitle', '[keV/(cm^2 s sr keV)]')
options('mms'+this_probe+'_dis_energyspectr_omni_'+this_dr+suffix, 'spec', True)
if 'mms'+this_probe+'_dis_bulkv_dbcs_'+this_dr+suffix in tvars:
- options('mms'+this_probe+'_dis_bulkv_dbcs_'+this_dr+suffix, 'color', ['b', 'g', 'r'])
options('mms'+this_probe+'_dis_bulkv_dbcs_'+this_dr+suffix, 'legend_names', ['Vx DBCS', 'Vy DBCS', 'Vz DBCS'])
options('mms'+this_probe+'_dis_bulkv_dbcs_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DIS velocity')
if 'mms'+this_probe+'_dis_bulkv_gse_'+this_dr+suffix in tvars:
- options('mms'+this_probe+'_dis_bulkv_gse_'+this_dr+suffix, 'color', ['b', 'g', 'r'])
options('mms'+this_probe+'_dis_bulkv_gse_'+this_dr+suffix, 'legend_names', ['Vx GSE', 'Vy GSE', 'Vz GSE'])
options('mms'+this_probe+'_dis_bulkv_gse_'+this_dr+suffix, 'ytitle', 'MMS'+this_probe+' DIS velocity')
diff --git a/pyspedas/mms/fpi/mms_fpi_split_tensor.py b/pyspedas/mms/fpi/mms_fpi_split_tensor.py
index c6769c65..153663fa 100644
--- a/pyspedas/mms/fpi/mms_fpi_split_tensor.py
+++ b/pyspedas/mms/fpi/mms_fpi_split_tensor.py
@@ -1,5 +1,7 @@
+import logging
from pytplot import get_data, store_data
+
def mms_fpi_split_tensor(tensor_variable):
"""
Splits FPI tensor variables (pressure, temperature) into their components
@@ -14,39 +16,39 @@ def mms_fpi_split_tensor(tensor_variable):
List of variables created.
"""
- data = get_data(tensor_variable)
+ data = get_data(tensor_variable, dt=True)
if data is None:
- print('Problem returning data from the variable: ' + tensor_variable)
+ logging.error('Problem returning data from the variable: ' + tensor_variable)
return
saved = store_data(tensor_variable + '_xx', data={'x': data.times, 'y': data.y[:, 0, 0]})
if not saved:
- print('Problem saving xx component')
+ logging.warning('Problem saving xx component')
saved = store_data(tensor_variable + '_xy', data={'x': data.times, 'y': data.y[:, 0, 1]})
if not saved:
- print('Problem saving xy component')
+ logging.warning('Problem saving xy component')
saved = store_data(tensor_variable + '_xz', data={'x': data.times, 'y': data.y[:, 0, 2]})
if not saved:
- print('Problem saving xz component')
+ logging.warning('Problem saving xz component')
saved = store_data(tensor_variable + '_yx', data={'x': data.times, 'y': data.y[:, 1, 0]})
if not saved:
- print('Problem saving yx component')
+ logging.warning('Problem saving yx component')
saved = store_data(tensor_variable + '_yy', data={'x': data.times, 'y': data.y[:, 1, 1]})
if not saved:
- print('Problem saving yy component')
+ logging.warning('Problem saving yy component')
saved = store_data(tensor_variable + '_yz', data={'x': data.times, 'y': data.y[:, 1, 2]})
if not saved:
- print('Problem saving yz component')
+ logging.warning('Problem saving yz component')
saved = store_data(tensor_variable + '_zx', data={'x': data.times, 'y': data.y[:, 2, 0]})
if not saved:
- print('Problem saving zx component')
+ logging.warning('Problem saving zx component')
saved = store_data(tensor_variable + '_zy', data={'x': data.times, 'y': data.y[:, 2, 1]})
if not saved:
- print('Problem saving zy component')
+ logging.warning('Problem saving zy component')
saved = store_data(tensor_variable + '_zz', data={'x': data.times, 'y': data.y[:, 2, 2]})
if not saved:
- print('Problem saving zz component')
+ logging.warning('Problem saving zz component')
components = ['xx', 'xy', 'xz', 'yx', 'yy', 'yz', 'zx', 'zy', 'zz']
return [tensor_variable + '_' + component for component in components]
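Each component is direct indexing into the (time, 3, 3) tensor; a toy version:

import numpy as np

tensor_y = np.random.rand(4, 3, 3)  # time x 3 x 3 pressure/temperature tensor
xy = tensor_y[:, 0, 1]              # stored as <tensor_variable>_xy
components = ['xx', 'xy', 'xz', 'yx', 'yy', 'yz', 'zx', 'zy', 'zz']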
diff --git a/pyspedas/mms/fpi/mms_get_fpi_dist.py b/pyspedas/mms/fpi/mms_get_fpi_dist.py
index 3b2c93ec..86e460b8 100644
--- a/pyspedas/mms/fpi/mms_get_fpi_dist.py
+++ b/pyspedas/mms/fpi/mms_get_fpi_dist.py
@@ -1,6 +1,4 @@
-
import logging
-from copy import deepcopy
import numpy as np
from pyspedas import time_double
from pytplot import get_data
diff --git a/pyspedas/mms/fpi/mms_load_fpi_calc_pad.py b/pyspedas/mms/fpi/mms_load_fpi_calc_pad.py
index ffbccf18..095dd3ad 100644
--- a/pyspedas/mms/fpi/mms_load_fpi_calc_pad.py
+++ b/pyspedas/mms/fpi/mms_load_fpi_calc_pad.py
@@ -1,6 +1,7 @@
from pyspedas import tnames
from pytplot import get_data, store_data, options
+
def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', suffix='', autoscale=True):
"""
Calculates the omni-directional pitch angle distribution (summed and averaged)
@@ -31,7 +32,8 @@ def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', su
autoscale: bool
If set, use the default zrange; otherwise, use the min and max of the data for the zrange
- Returns:
+ Returns
+ ----------
List of tplot variables created.
"""
@@ -67,9 +69,9 @@ def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', su
pad_avg_name = obsstr+'PitchAngDist_avg'+suffix
- low_en = get_data(pad_vars[0])
- mid_en = get_data(pad_vars[1])
- high_en = get_data(pad_vars[2])
+ low_en = get_data(pad_vars[0], dt=True)
+ mid_en = get_data(pad_vars[1], dt=True)
+ high_en = get_data(pad_vars[2], dt=True)
if low_en is None or mid_en is None or high_en is None:
v3_low_pad = tnames(pad_vars[0].lower()+'_'+data_rate)
@@ -78,9 +80,9 @@ def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', su
if v3_low_pad == [] or v3_mid_pad == [] or v3_high_pad == []:
continue
- low_en = get_data(v3_low_pad[0])
- mid_en = get_data(v3_mid_pad[0])
- high_en = get_data(v3_high_pad[0])
+ low_en = get_data(v3_low_pad[0], dt=True)
+ mid_en = get_data(v3_mid_pad[0], dt=True)
+ high_en = get_data(v3_high_pad[0], dt=True)
pad_avg_name = pad_avg_name.lower()
e_pad_sum = low_en.y+mid_en.y+high_en.y
@@ -109,7 +111,6 @@ def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', su
options(pad_avg_name, 'yrange', [0, 180])
options(pad_avg_name, 'zlog', True)
options(pad_avg_name, 'spec', True)
- options(pad_avg_name, 'Colormap', 'spedas')
out_vars.append(pad_avg_name)
- return out_vars
\ No newline at end of file
+ return out_vars
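The omnidirectional PAD is built by summing and averaging the low/mid/high-energy distributions sample-by-sample; a sketch with hypothetical shapes:

import numpy as np

low_y = np.random.rand(5, 12)   # time x pitch-angle bins
mid_y = np.random.rand(5, 12)
high_y = np.random.rand(5, 12)

e_pad_sum = low_y + mid_y + high_y
e_pad_avg = e_pad_sum / 3.0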
diff --git a/pyspedas/mms/fpi/mms_pad_fpi.py b/pyspedas/mms/fpi/mms_pad_fpi.py
new file mode 100644
index 00000000..f67dc9b2
--- /dev/null
+++ b/pyspedas/mms/fpi/mms_pad_fpi.py
@@ -0,0 +1,537 @@
+from copy import deepcopy
+import numpy as np
+from scipy.ndimage import shift
+from scipy.constants import c as const_c
+from pyspedas import time_double
+from pyspedas.particles.spd_slice2d.slice2d_nearest import slice2d_nearest
+from pyspedas.particles.spd_slice2d.slice2d_intrange import slice2d_intrange
+from pyspedas.particles.spd_slice2d.slice2d_get_support import slice2d_get_support
+from pyspedas.particles.spd_slice2d.slice2d_s2c import slice2d_s2c
+from pyspedas.particles.spd_slice2d.tplot_average import tplot_average
+from pyspedas.mms.particles.moka_mms_clean_data import moka_mms_clean_data
+
+
+def mms_pad_fpi(dists,
+ #disterr,
+ time=None,
+ window=None,
+ center_time=False,
+ trange=None,
+ samples=None,
+ mag_data=None,
+ vel_data=None,
+ nbin=18,
+ da=45.0,
+ da2=45.0,
+ pr___0=None,
+ pr__90=None,
+ pr_180=None,
+ subtract_bulk=False,
+ units='df_cm',
+ oclreal=False,
+ norm=False):
+ """
+ Calculate the pitch-angle distribution (angle vs energy plot) and energy spectrum
+ in the omni, para, perp, and anti-para directions using MMS FPI data.
+    Also returns the one-count level.
+
+ Parameters
+ ------------
+ dists : list of dicts
+ List of dictionaries containing the FPI particle distribution data.
+ time : str or float, optional
+ Time at which the pad will be computed.
+ window : float, optional
+ Length in seconds from TIME over which data will be averaged.
+ center_time : bool, optional
+ Flag denoting that TIME should be the midpoint for the window instead of the beginning.
+ trange : list, optional
+ Two-element time range over which data will be averaged.
+ samples : int or float, optional
+ Number of nearest samples to TIME to average.
+ mag_data : str, optional
+ Name of tplot variable containing magnetic field data or 3-vector.
+ This will be used for pitch-angle calculation and must be in the
+ same coordinates as the particle data.
+ vel_data : str, optional
+ Name of tplot variable containing the bulk velocity data or 3-vector.
+ This will be used for pitch-angle calculation and must be in the
+ same coordinates as the particle data.
+ nbin : int, optional
+ Number of bins in the pitch-angle direction.
+    da : float, optional
+        Half-width, in degrees, of the pitch angle ranges for the "para" and
+        "anti-para" spectra; default = 45.0.
+    da2 : float, optional
+        Half-width, in degrees, of the pitch angle range for the "perp"
+        spectrum; default = 45.0.
+ pr___0 : list or tuple, optional
+ Pitch angle range for the "para" spectrum, default = [0, 45].
+ pr__90 : list or tuple, optional
+ Pitch angle range for the "perp" spectrum, default = [45, 135].
+ pr_180 : list or tuple, optional
+ Pitch angle range for the "anti-para" spectrum, default = [135, 180].
+ subtract_bulk : bool, optional
+ Set True to subtract bulk velocity from velocity data.
+ units : str, optional
+ Units for both the pitch-angle-distribution (pad) and energy spectrum.
+        Options are 'eflux' [eV/(cm^2 s sr eV)] and 'df_km' [s^3/km^6];
+        the default is 'df_cm' [s^3/cm^6], per the function signature.
+ oclreal : bool, optional
+ Set True to return the real one-count level
+ norm : bool, optional
+ Set True for normalizing the data at each energy bin.
+
+ Returns
+ --------
+ results : dict
+ A dictionary containing the following results:
+ - trange : list
+ Two-element time range over which data was averaged.
+ - egy : array-like
+ Energy values in eV.
+ - pa : array-like
+ Pitch angle values in degrees.
+ - data : array-like, shape (Nenergy, Npitch)
+ Pitch-angle distribution (angle vs energy plot).
+ - datanorm : array-like, shape (Nenergy, Npitch)
+ Normalized pitch-angle distribution (angle vs energy plot).
+        - numSlices : int
+            Number of time samples averaged.
+ - nbin : int
+ Number of pitch angle bins.
+ - units : str
+ Units of the returned data.
+ - subtract_bulk : bool
+ True if bulk velocity was subtracted from velocity data.
+ - egyrange : list
+ Energy range of the data.
+ - spec___0 : array-like
+ Energy spectrum in the "para" direction.
+ - spec__90 : array-like
+ Energy spectrum in the "perp" direction.
+ - spec_180 : array-like
+ Energy spectrum in the "anti-para" direction.
+ - spec_omn : array-like
+ Energy spectrum in the omni direction.
+        - cnts___0 : array-like
+            Average counts in the "para" direction.
+        - cnts__90 : array-like
+            Average counts in the "perp" direction.
+        - cnts_180 : array-like
+            Average counts in the "anti-para" direction.
+        - cnts_omn : array-like
+            Average counts in the omni direction.
+ - oclv___0 : array-like
+ One-count-level data in the "para" direction.
+ - oclv__90 : array-like
+ One-count-level data in the "perp" direction.
+ - oclv_180 : array-like
+ One-count-level data in the "anti-para" direction.
+ - oclv_omn : array-like
+ One-count-level data in the omni direction.
+ - eror___0 : array-like
+ Error data in the "para" direction.
+ - eror__90 : array-like
+ Error data in the "perp" direction.
+ - eror_180 : array-like
+ Error data in the "anti-para" direction.
+ - eror_omn : array-like
+ Error data in the omni direction.
+        - vbulk_para : float
+            Bulk velocity component parallel to the magnetic field.
+        - vbulk_perp_abs : float
+            Magnitude of the bulk velocity component perpendicular to the magnetic field.
+        - vbulk_vxb : float
+            Bulk velocity component along the v x B direction.
+        - vbulk_exb : float
+            Bulk velocity component along the E x B direction.
+        - bnrm : array-like
+            Average magnetic field unit vector.
+        - Vbulk : array-like
+            Average bulk velocity vector.
+        - bfield : array-like
+            Average magnetic field vector (bnrm scaled by the average magnitude).
+        - species : str
+            Name of the particle species.
+        - pa_azpol : array-like, shape (Naz, Npol)
+            Pitch angle values in degrees on the azimuth x polar grid.
+        - wpol : array-like
+            Polar bin center values in degrees.
+        - waz : array-like
+            Azimuthal bin center values in degrees.
+ """
+
+ dr = np.pi/180.0
+ rd = 1.0/dr
+
+ if pr___0 is None:
+ pr___0 = [0.0, da] # para pitch angle range
+ if pr__90 is None:
+ pr__90 = [90-da2, 90+da2] # perp pitch angle range
+ if pr_180 is None:
+ pr_180 = [180-da, 180.0] # anti-para pitch angle range
+
+ if trange is None:
+ if time is None:
+            print('Please specify a time or time range over which to compute the pad. For example: ')
+ print(' "time=t, window=w" or "trange=tr" or "time=t, samples=n"')
+ return
+ if window is None and samples is None:
+ samples = 1 # use single closest distribution by default
+ else:
+ # time range already provided
+ trange = [np.nanmin(time_double(trange)), np.nanmax(time_double(trange))]
+
+ if time is not None:
+ # get the time range if one was specified
+ time = time_double(time)
+
+ # get the time range if a time & window were specified instead
+ if trange is None and window is not None:
+ if center_time:
+ trange = [time - window/2.0, time + window/2.0]
+ else:
+ trange = [time, time + window]
+
+ # if no time range or window was specified then get a time range
+ # from the N closest samples to the specified time
+ # (defaults to 1 if SAMPLES is not defined)
+ if trange is None:
+ trange = slice2d_nearest(dists, time, samples)
+
+ # check that there is data in the trange before proceeding
+ times_ind = slice2d_intrange(dists, trange)
+
+ n_samples = len(times_ind)
+
+ if n_samples < 1:
+        print('No particle data in the time window;')
+        print('time samples may be at low cadence; try adjusting the time window.')
+ return
+
+ print(str(n_samples) + ' samples in time window')
+
+ nns = np.nanmin(times_ind)
+ nne = np.nanmax(times_ind)
+
+ # check support data
+ bfield = slice2d_get_support(mag_data, trange)
+ vbulk = slice2d_get_support(vel_data, trange)
+
+ if bfield is None:
+ print('Magnetic field data needed to calculate pitch-angles')
+ return
+ if vbulk is None and subtract_bulk:
+ print('Velocity data needed to subtract bulk velocity.')
+ return
+
+ species = dists[0]['species']
+ if units == 'eflux':
+ if species == 'i':
+ A = 1 # H+
+ elif species == 'e':
+ A = 1.0/1836.0
+
+ flux_to_df = A**2 * 0.5447*1e6
+ cm_to_km = 1e30
+ in_u = np.array([2, -1, 0]) # units_in = 'df_km'
+ out_u = np.array([0, 0, 0]) # units_out = 'eflux'
+ exp = in_u + out_u
+
+
+ # pitch angle bins
+ kmax = nbin
+ pamin = 0.0
+ pamax = 180.0
+ dpa = (pamax-pamin)/kmax # bin size
+ wpa = pamin + np.arange(kmax)*dpa + 0.5*dpa # bin center values
+ pa_bin = np.append(pamin + np.arange(kmax)*dpa, pamax)
+
+ # azimuthal bins
+ amax = 16
+ azmin = 0.
+ azmax = 180.
+ daz = (azmax-azmin)/amax
+ waz = azmin + np.arange(amax) * daz + 0.5 * daz
+ az_bin = np.append(azmin + np.arange(amax) * daz, azmax)
+
+ # polar bins
+ pmax = 16
+ polmin = 0.
+ polmax = 360.
+ dpol = (polmax - polmin)/pmax
+ wpol = polmin + np.arange(pmax) * dpol + 0.5 * dpol
+ pol_bin = np.append(polmin + np.arange(pmax) * dpol, polmax)
+
+ # PA (az x pol)
+ pa_azpol = np.zeros((amax, pmax))
+ pa_azpol[:, :] = np.nan
+
+ # energy bins
+ wegy = dists[0]['energy'][:, 0, 0]
+ if subtract_bulk:
+ wegy = np.append(2 * wegy[0] - wegy[1], wegy)
+
+ jmax = len(wegy)
+ egy_bin = 0.5*(wegy + shift(wegy, -1))
+ egy_bin[jmax - 1] = 2. * wegy[jmax - 1] - egy_bin[jmax - 2]
+ egy_bin0 = 2. * wegy[0] - egy_bin[0]
+ if egy_bin0 < 0:
+ egy_bin0 = 0
+ egy_bin = np.append(egy_bin0, egy_bin)
+
+ # prep
+ pad = np.zeros((jmax, kmax))
+ mmax = 4
+ f_dat = np.zeros((jmax, mmax)) # Four elements for para, perp, anti-para and omni directions
+ f_psd = np.zeros((jmax, mmax))
+ f_err = np.zeros((jmax, mmax))
+ f_cnt = np.zeros((jmax, mmax))
+ count_pad = np.zeros((jmax, kmax))
+ count_dat = np.zeros((jmax, mmax))
+
+ # magnetic field and bulk velocity
+    bnrm_avg = np.zeros(3)  # must be ndarrays: list += ndarray extends the list, and list /= float fails below
+    babs_avg = 0.
+    vbulk_avg = np.zeros(3)
+ vbulk_para = 0.0
+ vbulk_perp = 0.0
+ vbulk_vxb = 0.0
+ vbulk_exb = 0.0
+
+ # main loop
+ iecl = 0
+ iecm = 0
+
+ for n in np.arange(nns, nne+1):
+ data = moka_mms_clean_data(dists[n], units=units)
+
+ # magnetic field direction
+ tr = [dists[n]['start_time'], dists[n]['end_time']]
+ bfield = tplot_average(mag_data, tr, quiet=True)
+ babs = np.sqrt(bfield[0]**2 + bfield[1]**2 + bfield[2]**2)
+ bnrm = bfield/babs
+ bnrm_avg += bnrm
+ babs_avg += babs
+
+ # bulk velocity
+ if not subtract_bulk:
+ vbulk = np.array([0.0, 0.0, 0.0])
+
+ vbpara = bnrm[0]*vbulk[0]+bnrm[1]*vbulk[1]+bnrm[2]*vbulk[2]
+        vbperp = vbulk - vbpara*bnrm  # subtract the parallel component as a vector
+ vbperp_abs = np.sqrt(vbperp[0]**2+vbperp[1]**2+vbperp[2]**2)
+ vxb = np.array([-vbulk[1]*bnrm[2]+vbulk[2]*bnrm[1], -vbulk[2]*bnrm[0]+vbulk[0]*bnrm[2], -vbulk[0]*bnrm[1]+vbulk[1]*bnrm[0]])
+ vxbabs = np.sqrt(vxb[0]**2+vxb[1]**2+vxb[2]**2)
+ vxbnrm = vxb/vxbabs
+ exb = np.array([vxb[1]*bnrm[2]-vxb[2]*bnrm[1],vxb[2]*bnrm[0]-vxb[0]*bnrm[2],vxb[0]*bnrm[1]-vxb[1]*bnrm[0]])
+ exbabs = np.sqrt(exb[0]**2+exb[1]**2+exb[2]**2)
+ exbnrm = exb/exbabs
+ vbulk_para += vbpara
+ vbulk_perp += vbperp_abs
+ vbulk_vxb += vxbnrm[0]*vbulk[0]+vxbnrm[1]*vbulk[1]+vxbnrm[2]*vbulk[2]
+ vbulk_exb += exbnrm[0]*vbulk[0]+exbnrm[1]*vbulk[1]+exbnrm[2]*vbulk[2]
+ vbulk_avg += vbulk
+
+ # particle velocities & pitch angles
+
+ # spherical to cartesian
+ erest = data['mass']*const_c**2/1e6 # convert mass from eV/(km/s)^2 to eV
+ vabs = const_c*np.sqrt(1 - 1/((data['energy']/erest + 1)**2))/1000.0
+ vdata = slice2d_s2c(vabs, data['theta'], data['phi'])
+ vx = vdata[:, 0]
+ vy = vdata[:, 1]
+ vz = vdata[:, 2]
+
+ if subtract_bulk:
+ vx -= vbulk[0]
+ vy -= vbulk[1]
+ vz -= vbulk[2]
+
+ # pitch angles
+ dp = (bnrm[0]*vx + bnrm[1]*vy + bnrm[2]*vz)/np.sqrt(vx**2+vy**2+vz**2)
+ dp[dp > 1.0] = 1.0
+ dp[dp < -1.0] = -1.0
+ pa = rd*np.arccos(dp)
+
+ # Cartesian to spherical
+ vnew, theta, phi = cart_to_sphere(vx, vy, vz)
+ data['energy'] = erest*(1.0/np.sqrt(1.0-(vnew*1000.0/const_c)**2)-1.0) # eV
+ data['phi'] = phi
+ data['theta'] = theta
+ data['pa'] = pa
+
+ azang = 90.0 - data['theta']
+
+ imax = len(data['data_dat'])
+
+ for i in np.arange(0, imax):
+ # find energy bin
+ j = np.nanargmin(np.abs(egy_bin-data['energy'][i]))
+ if egy_bin[j] > data['energy'][i]:
+ j -= 1
+ if j == jmax:
+ j -= 1
+
+ # find pitch-angle bin
+ k = np.nanargmin(np.abs(pa_bin-data['pa'][i]))
+ if pa_bin[k] > data['pa'][i]:
+ k -= 1
+ if k == kmax:
+ k -= 1
+
+ # find azimuthal bin
+ a = np.nanargmin(np.abs(az_bin-azang[i]))
+ if az_bin[a] > azang[i]:
+ a -= 1
+ if a == amax:
+ a -= 1
+
+ # find polar bin
+ p = np.nanargmin(np.abs(pol_bin-data['phi'][i]))
+ if pol_bin[p] > data['phi'][i]:
+ p -= 1
+ if p == pmax:
+ p -= 1
+
+ if j >= 0:
+ pa_azpol[a, p] = data['pa'][i]
+
+ # find new eflux
+ # If shifted to plasma rest-frame, 'eflux' should be re-evaluated
+ # from 'psd' because 'eflux' depends on the particle energy. We don't need to
+ # worry about this if we want the output in 'psd'.
+ newenergy = wegy[j]
+ if units == 'eflux':
+ newdat = data['data_psd'][i]*newenergy**exp[0]*(flux_to_df**exp[1]*cm_to_km**exp[2])
+ newpsd = newdat
+ newerr = data['data_err'][i]*newenergy**exp[0]*(flux_to_df**exp[1]*cm_to_km**exp[2])
+ else:
+ newdat = data['data_dat'][i]
+ newpsd = data['data_psd'][i]
+ newerr = data['data_err'][i]
+
+ pad[j, k] += newdat
+ count_pad[j, k] += 1
+
+ # energy spectrum (para, perp, anti-para)
+ m = -1
+ if (pr__90[0] <= data['pa'][i]) and (data['pa'][i] <= pr__90[1]):
+ m = 1
+ else:
+ if (pr___0[0] <= data['pa'][i]) and (data['pa'][i] <= pr___0[1]):
+ m = 0
+ if (pr_180[0] <= data['pa'][i]) and (data['pa'][i] <= pr_180[1]):
+ m = 2
+
+ if (m >= 0) and (m <= 2):
+ f_dat[j, m] += newdat
+ f_psd[j, m] += newpsd
+ f_err[j, m] += newerr
+ f_cnt[j, m] += data['data_cnt'][i]
+ count_dat[j, m] += 1
+
+ # energy spectrum (omni-direction)
+ m = 3
+ f_dat[j, m] += newdat
+ f_psd[j, m] += newpsd
+ f_err[j, m] += newerr
+ f_cnt[j, m] += data['data_cnt'][i]
+ count_dat[j, m] += 1
+ else:
+ iecl += 1
+ else:
+ iecm += imax
+
+ pad /= count_pad
+ f_dat /= count_dat
+ f_psd /= count_dat
+ f_err /= count_dat
+ f_cnt /= count_dat
+
+ vbulk_para /= float(n_samples)
+ vbulk_perp /= float(n_samples)
+ vbulk_vxb /= float(n_samples)
+ vbulk_exb /= float(n_samples)
+ vbulk_avg /= float(n_samples)
+
+ bnrm_avg /= float(n_samples)
+ babs_avg /= float(n_samples)
+
+ pad = np.nan_to_num(pad, nan=0)
+
+ # angle padding
+ padnew = np.zeros((jmax, kmax+2))
+ padnew[0:jmax, 1:kmax+1] = pad
+ padnew[0:jmax, 0] = padnew[0:jmax, 1]
+ padnew[0:jmax, kmax + 1] = padnew[0:jmax, kmax]
+ pad = padnew
+ wpa_new = np.append(wpa[0] - dpa, wpa)
+ wpa_new = np.append(wpa_new, wpa[kmax - 1] + dpa)
+ wpa = wpa_new
+
+ # normalize
+ padnorm = deepcopy(pad)
+ for j in range(0, jmax):
+ peak = np.nanmax(pad[j, 0:kmax]) # find the peak
+ if peak == 0:
+ padnorm[j, 0:kmax] = 0.0
+ else:
+ padnorm[j, 0:kmax] /= peak
+
+ if norm:
+ pad = padnorm
+
+ # Effective one-count-level
+ # 'f_psd' is the PSD averaged over time and angular ranges.
+ # 'f_cnt' is the counts averaged over time and angular ranges.
+    # 'count_dat' is the total number of time and angular bins.
+ if oclreal:
+ f_ocl = f_psd/f_cnt
+ else:
+ f_ocl = f_psd/(f_cnt*count_dat)
+
+ # output
+ return {'trange': trange,
+ 'egy': wegy,
+ 'pa': wpa,
+ 'data': pad,
+ 'datanorm': padnorm,
+ 'numSlices': n_samples,
+ 'nbin': kmax,
+ 'units': units,
+ 'subtract_bulk': subtract_bulk,
+ 'egyrange': [np.nanmin(wegy), np.nanmax(wegy)],
+ #'parange': [np.nanmin(wpa), np.nanmax(wpa)],
+ 'spec___0': f_psd[:, 0],
+ 'spec__90': f_psd[:, 1],
+ 'spec_180': f_psd[:, 2],
+ 'spec_omn': f_psd[:, 3],
+ 'cnts___0': f_cnt[:, 0],
+ 'cnts__90': f_cnt[:, 1],
+ 'cnts_180': f_cnt[:, 2],
+ 'cnts_omn': f_cnt[:, 3],
+ 'oclv___0': f_ocl[:, 0],
+ 'oclv__90': f_ocl[:, 1],
+ 'oclv_180': f_ocl[:, 2],
+ 'oclv_omn': f_ocl[:, 3],
+ 'eror___0': f_err[:, 0],
+ 'eror__90': f_err[:, 1],
+ 'eror_180': f_err[:, 2],
+ 'eror_omn': f_err[:, 3],
+ 'vbulk_para': vbulk_para,
+ 'vbulk_perp_abs': vbulk_perp,
+ 'vbulk_vxb': vbulk_vxb,
+ 'vbulk_exb': vbulk_exb,
+ 'bnrm': bnrm_avg,
+ 'Vbulk': vbulk_avg,
+ 'bfield': bnrm_avg * babs_avg,
+ 'species': species,
+ 'pa_azpol': pa_azpol,
+ 'wpol': wpol,
+ 'waz': waz}
+
+
+def cart_to_sphere(x, y, z):
+ rho = x * x + y * y
+ r = np.sqrt(rho + z * z)
+    phi = 180.0/np.pi * np.arctan2(y, x)
+    # should be between 0-360
+    phi[phi < 0] += 360.0
+    theta = 180.0/np.pi * np.arctan(z / np.sqrt(rho))
+ return (r, theta, phi)
\ No newline at end of file
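The core of mms_pad_fpi is the pitch-angle calculation and binning: each
particle's pitch angle comes from the clipped dot product between its velocity
and the magnetic field direction, and is then accumulated into fixed-width
bins. A self-contained numpy sketch of the same technique, with synthetic
velocities and np.digitize standing in for the nanargmin bin search above:

    import numpy as np

    rd = 180.0/np.pi
    nbin = 18
    pa_bin = np.linspace(0.0, 180.0, nbin + 1)  # bin edges, 0-180 deg
    bnrm = np.array([0.0, 0.0, 1.0])            # unit magnetic field vector

    # synthetic particle velocity vectors (km/s)
    rng = np.random.default_rng(0)
    v = rng.normal(size=(1000, 3)) * 500.0

    # pitch angle from the clipped dot product
    dp = v @ bnrm / np.linalg.norm(v, axis=1)
    dp = np.clip(dp, -1.0, 1.0)
    pa = rd * np.arccos(dp)

    # bin index for each particle; counts per pitch-angle bin
    k = np.clip(np.digitize(pa, pa_bin) - 1, 0, nbin - 1)
    counts = np.bincount(k, minlength=nbin)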
diff --git a/pyspedas/mms/fsm/fsm.py b/pyspedas/mms/fsm/fsm.py
index 3fd4e763..7e2989a5 100644
--- a/pyspedas/mms/fsm/fsm.py
+++ b/pyspedas/mms/fsm/fsm.py
@@ -1,18 +1,19 @@
from pyspedas.mms.mms_load_data import mms_load_data
from pyspedas.mms.print_vars import print_vars
+
@print_vars
def mms_load_fsm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='brst',
level='l3', datatype='8khz', get_support_data=False, time_clip=False, no_update=False,
available=False, varformat=None, varnames=[], notplot=False, suffix='', latest_version=False,
major_version=False, min_version=None, cdf_version=None, spdf=False, always_prompt=False):
"""
- This function loads FSM data into tplot variables
+ This function loads FSM (FGM + SCM) data into tplot variables
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -51,11 +52,11 @@ def mms_load_fsm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='brst
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -75,12 +76,13 @@ def mms_load_fsm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='brst
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ -----------
List of tplot variables created.
"""
@@ -88,4 +90,4 @@ def mms_load_fsm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='brst
level=level, instrument='fsm', datatype=datatype, get_support_data=get_support_data, time_clip=time_clip,
no_update=no_update, available=available, suffix=suffix, latest_version=latest_version, varnames=varnames,
major_version=major_version, min_version=min_version, cdf_version=cdf_version, spdf=spdf, always_prompt=always_prompt)
- return tvars
\ No newline at end of file
+ return tvars
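A minimal load sketch for the routine above, assuming the module path in the
diff header (FSM burst files are large, so a short trange is used):

    from pyspedas.mms.fsm.fsm import mms_load_fsm

    fsm_vars = mms_load_fsm(trange=['2015-10-16/13:06', '2015-10-16/13:07'],
                            probe='4', data_rate='brst', level='l3',
                            datatype='8khz')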
diff --git a/pyspedas/mms/hpca/hpca.py b/pyspedas/mms/hpca/hpca.py
index 227a3e92..f681c2cd 100644
--- a/pyspedas/mms/hpca/hpca.py
+++ b/pyspedas/mms/hpca/hpca.py
@@ -1,4 +1,3 @@
-
import numpy as np
import logging
import re
@@ -8,12 +7,12 @@
from pyspedas.mms.hpca.mms_hpca_energies import mms_hpca_energies
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
-
-from pytplot import get_data, store_data
+from pytplot import get_data, store_data, get
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
@print_vars
def mms_load_hpca(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy',
level='l2', datatype='moments', get_support_data=None, time_clip=False, no_update=False,
@@ -21,12 +20,12 @@ def mms_load_hpca(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
latest_version=False, major_version=False, min_version=None, cdf_version=None, spdf=False,
always_prompt=False):
"""
- This function loads HPCA data into tplot variables
+ Load data from the Hot Plasma Composition Analyzer (HPCA)
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -71,11 +70,11 @@ def mms_load_hpca(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -95,16 +94,16 @@ def mms_load_hpca(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ -----------
List of tplot variables created.
"""
-
if level.lower() != 'l2':
if varformat is None:
if level.lower() != 'l1a':
@@ -176,8 +175,6 @@ def mms_load_hpca(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
else:
theta = theta_data
- store_data(tvar, data={'x': df_data.times, 'y': df_data.y, 'v1': theta, 'v2': df_data.v2}, attr_dict=df_metadata)
-
# check if energy table contains all 0s
zerocheck = np.argwhere(df_data.v2 == 0.0)
if len(zerocheck) == 63:
@@ -185,5 +182,10 @@ def mms_load_hpca(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
energy_table = mms_hpca_energies()
logging.warning('Found energy table with all 0s: ' + tvar + '; using hard-coded energy table instead')
store_data(tvar, data={'x': df_data.times, 'y': df_data.y, 'v1': theta, 'v2': energy_table}, attr_dict=df_metadata)
+ else:
+ store_data(tvar, data={'x': df_data.times, 'y': df_data.y, 'v1': theta, 'v2': df_data.v2},
+ attr_dict=df_metadata)
+ metadata = get(tvar, metadata=True)
+ metadata['data_att']['depend_1_units'] = 'deg'
return tvars
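A minimal load sketch for the HPCA routine above, assuming the module path in
the diff header:

    from pyspedas.mms.hpca.hpca import mms_load_hpca

    hpca_vars = mms_load_hpca(trange=['2015-10-16', '2015-10-17'],
                              probe='1', data_rate='srvy', level='l2',
                              datatype='moments')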
diff --git a/pyspedas/mms/hpca/mms_get_hpca_dist.py b/pyspedas/mms/hpca/mms_get_hpca_dist.py
index 11366c37..a1e5d520 100644
--- a/pyspedas/mms/hpca/mms_get_hpca_dist.py
+++ b/pyspedas/mms/hpca/mms_get_hpca_dist.py
@@ -1,4 +1,3 @@
-
import logging
import numpy as np
from pytplot import get_data
@@ -6,6 +5,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_get_hpca_dist(tname, index=None, probe=None, data_rate=None, species=None, times=False):
"""
Returns 3D particle data structures containing MMS HPCA
@@ -33,10 +33,10 @@ def mms_get_hpca_dist(tname, index=None, probe=None, data_rate=None, species=Non
times: bool
Flag to return the distribution times instead of the particle data structure(s)
- Returns:
+ Returns
+ ------------
3D particle data structure(s) containing MMS HPCA distribution functions
"""
-
data_in = get_data(tname)
# Match particle data to azimuth data
@@ -68,7 +68,6 @@ def mms_get_hpca_dist(tname, index=None, probe=None, data_rate=None, species=Non
logging.error('Azimuth data does not cover current data\'s time range')
return
-
# filter times when azimuth data is all zero
# -just check the first energy & elevation
# -assume azimuth values are positive
@@ -199,7 +198,10 @@ def mms_get_hpca_dist(tname, index=None, probe=None, data_rate=None, species=Non
else:
end_idx = int(data_idx[i]+n_times/2.)
- out_data[i, :, :, :] = data_in.y[start_idx:end_idx, :, :].transpose([2, 0, 1])
+ try:
+ out_data[i, :, :, :] = data_in.y[start_idx:end_idx, :, :].transpose([2, 0, 1])
+ except ValueError:
+ out_data[i, :, :, :] = np.nan
out_list = []
@@ -227,8 +229,3 @@ def mms_get_hpca_dist(tname, index=None, probe=None, data_rate=None, species=Non
out_list.append(out_table)
return out_list
-
-
-
-
-
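A minimal sketch of calling mms_get_hpca_dist, assuming a hypothetical
distribution variable name already loaded into tplot:

    from pyspedas.mms.hpca.mms_get_hpca_dist import mms_get_hpca_dist

    # hypothetical tplot variable holding an HPCA H+ distribution
    tname = 'mms1_hpca_hplus_phase_space_density'

    times = mms_get_hpca_dist(tname, times=True)  # distribution times only
    dists = mms_get_hpca_dist(tname, index=0)     # first 3D data structure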
diff --git a/pyspedas/mms/hpca/mms_get_hpca_info.py b/pyspedas/mms/hpca/mms_get_hpca_info.py
index dec73d12..edf37918 100644
--- a/pyspedas/mms/hpca/mms_get_hpca_info.py
+++ b/pyspedas/mms/hpca/mms_get_hpca_info.py
@@ -2,7 +2,6 @@
def mms_get_hpca_info():
"""
Returns structure containing hpca look directions, energies, and other info.
-
"""
forward_anodes = [14,15,0,1,2,3,4,5]
diff --git a/pyspedas/mms/hpca/mms_hpca_calc_anodes.py b/pyspedas/mms/hpca/mms_hpca_calc_anodes.py
index d2188747..686e217d 100644
--- a/pyspedas/mms/hpca/mms_hpca_calc_anodes.py
+++ b/pyspedas/mms/hpca/mms_hpca_calc_anodes.py
@@ -1,6 +1,7 @@
from pyspedas import tnames
from pytplot import options, get_data, store_data
+
def mms_hpca_elevations():
anode_theta = [123.75000, 101.25000, 78.750000, 56.250000, 33.750000,
11.250000, 11.250000, 33.750000, 56.250000, 78.750000,
@@ -9,20 +10,24 @@ def mms_hpca_elevations():
anode_theta[6:14] = [anode_val+180. for anode_val in anode_theta[6:14]]
return anode_theta
+
def mms_hpca_anodes(fov=[0, 360]):
anodes = mms_hpca_elevations()
return [i for i, anode in enumerate(anodes) if anode >= float(fov[0]) and anode <= float(fov[1])]
+
def mms_hpca_sum_fov(times, data, angles, energies, fov=[0, 360], anodes=None):
anodes_in_fov = mms_hpca_anodes(fov=fov)
data_within_fov = data[:,anodes_in_fov,:]
return data_within_fov.sum(axis=1)
+
def mms_hpca_avg_fov(times, data, angles, energies, fov=[0, 360], anodes=None):
anodes_in_fov = mms_hpca_anodes(fov=fov)
data_within_fov = data[:,anodes_in_fov,:]
return data_within_fov.mean(axis=1)
+
def mms_hpca_calc_anodes(fov=[0, 360], probe='1', suffix=''):
"""
This function will sum (or average, for flux) the HPCA data over the requested field-of-view (fov)
@@ -87,4 +92,3 @@ def mms_hpca_calc_anodes(fov=[0, 360], probe='1', suffix=''):
options(var+fov_str, 'Colormap', 'spedas')
output_vars.append(var+fov_str)
return output_vars
-
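A minimal sketch of summing (or averaging, for flux) over the full
field of view with the routine above:

    from pyspedas.mms.hpca.mms_hpca_calc_anodes import mms_hpca_calc_anodes

    fov_vars = mms_hpca_calc_anodes(fov=[0, 360], probe='1')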
diff --git a/pyspedas/mms/hpca/mms_hpca_energies.py b/pyspedas/mms/hpca/mms_hpca_energies.py
index b058674b..9e65535a 100644
--- a/pyspedas/mms/hpca/mms_hpca_energies.py
+++ b/pyspedas/mms/hpca/mms_hpca_energies.py
@@ -1,6 +1,6 @@
-
import numpy as np
+
def mms_hpca_energies():
return np.array([1.35500, 1.57180, 1.84280, 2.22220, 2.60160, 3.08940, 3.63140, 4.28180,
5.04060, 5.96200, 6.99180, 8.23840, 9.75600, 11.4904, 13.5500, 15.9890,
@@ -9,4 +9,4 @@ def mms_hpca_energies():
264.984, 312.571, 368.723, 434.955, 513.057, 605.197, 713.868, 842.051,
993.323, 1171.70, 1382.10, 1630.28, 1923.07, 2268.43, 2675.80, 3156.28,
3723.11, 4391.72, 5180.44, 6110.72, 7208.11, 8502.57, 10029.5, 11830.6,
- 13955.2, 16461.4, 19417.5, 22904.6, 27017.9, 31869.8, 37593.1])
\ No newline at end of file
+ 13955.2, 16461.4, 19417.5, 22904.6, 27017.9, 31869.8, 37593.1])
diff --git a/pyspedas/mms/hpca/mms_hpca_set_metadata.py b/pyspedas/mms/hpca/mms_hpca_set_metadata.py
index 442935e2..5cfada24 100644
--- a/pyspedas/mms/hpca/mms_hpca_set_metadata.py
+++ b/pyspedas/mms/hpca/mms_hpca_set_metadata.py
@@ -1,13 +1,14 @@
-
from pytplot import options
from pyspedas import tnames
+
def mms_hpca_set_metadata(probe='1', fov=[0, 360], suffix=''):
"""
This function sets the plot metadata for HPCA data products, and is meant
to be called from the HPCA load routine
- Parameters:
+ Parameters
+ ------------
fov : list of int
field of view, in angles, from 0-360
@@ -17,7 +18,8 @@ def mms_hpca_set_metadata(probe='1', fov=[0, 360], suffix=''):
suffix: str
suffix of the loaded data
- Returns:
+ Returns
+ ------------
None
"""
prefix = 'mms'+str(probe)
@@ -35,23 +37,18 @@ def mms_hpca_set_metadata(probe='1', fov=[0, 360], suffix=''):
for var in tnames(valid_vel):
if var == prefix+'_hpca_hplus_ion_bulk_velocity'+suffix:
options(var, 'legend_names', ['Vx (H+)', 'Vy (H+)', 'Vz (H+)'])
- options(var, 'color', ['b', 'g', 'r'])
options(var, 'ytitle', 'H+ velocity')
if var == prefix+'_hpca_heplus_ion_bulk_velocity'+suffix:
options(var, 'legend_names', ['Vx (He+)', 'Vy (He+)', 'Vz (He+)'])
- options(var, 'color', ['b', 'g', 'r'])
options(var, 'ytitle', 'He+ velocity')
if var == prefix+'_hpca_heplusplus_ion_bulk_velocity'+suffix:
options(var, 'legend_names', ['Vx (He++)', 'Vy (He++)', 'Vz (He++)'])
- options(var, 'color', ['b', 'g', 'r'])
options(var, 'ytitle', 'He++ velocity')
if var == prefix+'_hpca_oplus_ion_bulk_velocity'+suffix:
options(var, 'legend_names', ['Vx (O+)', 'Vy (O+)', 'Vz (O+)'])
- options(var, 'color', ['b', 'g', 'r'])
options(var, 'ytitle', 'O+ velocity')
if var == prefix+'_hpca_oplusplus_ion_bulk_velocity'+suffix:
options(var, 'legend_names', ['Vx (O++)', 'Vy (O++)', 'Vz (O++)'])
- options(var, 'color', ['b', 'g', 'r'])
options(var, 'ytitle', 'O++ velocity')
for var in tnames(valid_temp):
@@ -60,5 +57,3 @@ def mms_hpca_set_metadata(probe='1', fov=[0, 360], suffix=''):
if var == prefix+'_hpca_heplusplus_scalar_temperature'+suffix: options(var, 'ytitle', 'He++ temp')
if var == prefix+'_hpca_oplus_scalar_temperature'+suffix: options(var, 'ytitle', 'O+ temp')
if var == prefix+'_hpca_oplusplus_scalar_temperature'+suffix: options(var, 'ytitle', 'O++ temp')
-
-
diff --git a/pyspedas/mms/hpca/mms_hpca_spin_sum.py b/pyspedas/mms/hpca/mms_hpca_spin_sum.py
index 791c7ab4..dd041c5e 100644
--- a/pyspedas/mms/hpca/mms_hpca_spin_sum.py
+++ b/pyspedas/mms/hpca/mms_hpca_spin_sum.py
@@ -2,6 +2,7 @@
from pytplot import get_data, store_data, options
from pyspedas import tnames
+
def mms_hpca_spin_sum(probe='1', datatypes=None, species=['hplus', 'oplus', 'oplusplus', 'heplus', 'heplusplus'], fov=['0', '360'], avg=False, suffix=''):
"""
This function will sum (or average, when the avg keyword is set to True) the HPCA data over each spin
@@ -64,9 +65,8 @@ def mms_hpca_spin_sum(probe='1', datatypes=None, species=['hplus', 'oplus', 'opl
options(var+'_spin', 'spec', True)
options(var+'_spin', 'ylog', True)
options(var+'_spin', 'zlog', True)
- options(var+'_spin', 'Colormap', 'spedas')
- options(var+'_spin', 'ztitle', species_map[var_species] + ' ' + var.split('_')[3] + ' (cm^2-s-sr-eV)^-1')
+ options(var+'_spin', 'ztitle', species_map[var_species] + ' ' + var.split('_')[3] + ' 1/(cm^2-s-sr-eV)')
options(var+'_spin', 'ytitle', species_map[var_species])
options(var+'_spin', 'ysubtitle', 'Energy [eV]')
output_vars.append(var+'_spin')
- return output_vars
\ No newline at end of file
+ return output_vars
diff --git a/pyspedas/mms/mec/mec.py b/pyspedas/mms/mec/mec.py
index f8c241c7..cf3be482 100644
--- a/pyspedas/mms/mec/mec.py
+++ b/pyspedas/mms/mec/mec.py
@@ -3,18 +3,19 @@
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
+
@print_vars
def mms_load_mec(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy',
level='l2', datatype='epht89q', varformat=None, varnames=[], suffix='', get_support_data=False,
time_clip=False, no_update=False, available=False, notplot=False, latest_version=False,
major_version=False, min_version=None, cdf_version=None, spdf=False, always_prompt=False):
"""
- This function loads MEC data into tplot variables
+ Load the attitude/ephemeris data from the LANL MEC files
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -54,11 +55,11 @@ def mms_load_mec(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -78,18 +79,20 @@ def mms_load_mec(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Notes:
+ Notes
+ ---------
The default datatype was changed to 'epht89q' on 15Nov2021. There are sometimes issues with
creating the Tsyganenko 04 data products, which leads to the 'epht04d' files not being available.
The 'epht89d' files contain the same ephemeris data - the only difference are the data products
that rely on the field model.
- Returns:
+ Returns
+ -----------
List of tplot variables created.
"""
diff --git a/pyspedas/mms/mec/mms_mec_set_metadata.py b/pyspedas/mms/mec/mms_mec_set_metadata.py
index 494274c3..7ddd3f19 100644
--- a/pyspedas/mms/mec/mms_mec_set_metadata.py
+++ b/pyspedas/mms/mec/mms_mec_set_metadata.py
@@ -1,6 +1,7 @@
from pytplot import options
from pyspedas import tnames
+
def mms_mec_set_metadata(probe, data_rate, level, suffix=''):
"""
This function updates the metadata for MEC data products
@@ -22,7 +23,8 @@ def mms_mec_set_metadata(probe, data_rate, level, suffix=''):
no suffix is added.
"""
- if not isinstance(probe, list): probe = [probe]
+ if not isinstance(probe, list):
+ probe = [probe]
instrument = 'mec'
@@ -31,51 +33,38 @@ def mms_mec_set_metadata(probe, data_rate, level, suffix=''):
for this_probe in probe:
if 'mms'+str(this_probe)+'_'+instrument+'_r_eci'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_r_eci'+suffix, 'ytitle', 'MMS'+str(this_probe)+' position')
- options('mms'+str(this_probe)+'_'+instrument+'_r_eci'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_r_eci'+suffix, 'legend_names', ['X ECI', 'Y ECI', 'Z ECI'])
if 'mms'+str(this_probe)+'_'+instrument+'_r_gsm'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_r_gsm'+suffix, 'ytitle', 'MMS'+str(this_probe)+' position')
- options('mms'+str(this_probe)+'_'+instrument+'_r_gsm'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_r_gsm'+suffix, 'legend_names', ['X GSM', 'Y GSM', 'Z GSM'])
if 'mms'+str(this_probe)+'_'+instrument+'_r_geo'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_r_geo'+suffix, 'ytitle', 'MMS'+str(this_probe)+' position')
- options('mms'+str(this_probe)+'_'+instrument+'_r_geo'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_r_geo'+suffix, 'legend_names', ['X GEO', 'Y GEO', 'Z GEO'])
if 'mms'+str(this_probe)+'_'+instrument+'_r_sm'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_r_sm'+suffix, 'ytitle', 'MMS'+str(this_probe)+' position')
- options('mms'+str(this_probe)+'_'+instrument+'_r_sm'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_r_sm'+suffix, 'legend_names', ['X SM', 'Y SM', 'Z SM'])
if 'mms'+str(this_probe)+'_'+instrument+'_r_gse'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_r_gse'+suffix, 'ytitle', 'MMS'+str(this_probe)+' position')
- options('mms'+str(this_probe)+'_'+instrument+'_r_gse'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_r_gse'+suffix, 'legend_names', ['X GSE', 'Y GSE', 'Z GSE'])
if 'mms'+str(this_probe)+'_'+instrument+'_r_gse2000'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_r_gse2000'+suffix, 'ytitle', 'MMS'+str(this_probe)+' position')
- options('mms'+str(this_probe)+'_'+instrument+'_r_gse2000'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_r_gse2000'+suffix, 'legend_names', ['X GSE2000', 'Y GSE2000', 'Z GSE2000'])
-
if 'mms'+str(this_probe)+'_'+instrument+'_v_eci'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_v_eci'+suffix, 'ytitle', 'MMS'+str(this_probe)+' velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_v_eci'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_v_eci'+suffix, 'legend_names', ['Vx ECI', 'Vy ECI', 'Vz ECI'])
if 'mms'+str(this_probe)+'_'+instrument+'_v_gsm'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_v_gsm'+suffix, 'ytitle', 'MMS'+str(this_probe)+' velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_v_gsm'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_v_gsm'+suffix, 'legend_names', ['Vx GSM', 'Vy GSM', 'Vz GSM'])
if 'mms'+str(this_probe)+'_'+instrument+'_v_geo'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_v_geo'+suffix, 'ytitle', 'MMS'+str(this_probe)+' velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_v_geo'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_v_geo'+suffix, 'legend_names', ['Vx GEO', 'Vy GEO', 'Vz GEO'])
if 'mms'+str(this_probe)+'_'+instrument+'_v_sm'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_v_sm'+suffix, 'ytitle', 'MMS'+str(this_probe)+' velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_v_sm'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_v_sm'+suffix, 'legend_names', ['Vx SM', 'Vy SM', 'Vz SM'])
if 'mms'+str(this_probe)+'_'+instrument+'_v_gse'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_v_gse'+suffix, 'ytitle', 'MMS'+str(this_probe)+' velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_v_gse'+suffix, 'color', ['b', 'g', 'r'])
options('mms'+str(this_probe)+'_'+instrument+'_v_gse'+suffix, 'legend_names', ['Vx GSE', 'Vy GSE', 'Vz GSE'])
if 'mms'+str(this_probe)+'_'+instrument+'_v_gse2000'+suffix in tvars:
options('mms'+str(this_probe)+'_'+instrument+'_v_gse2000'+suffix, 'ytitle', 'MMS'+str(this_probe)+' velocity')
- options('mms'+str(this_probe)+'_'+instrument+'_v_gse2000'+suffix, 'color', ['b', 'g', 'r'])
- options('mms'+str(this_probe)+'_'+instrument+'_v_gse2000'+suffix, 'legend_names', ['Vx GSE2000', 'Vy GSE2000', 'Vz GSE2000'])
\ No newline at end of file
+ options('mms'+str(this_probe)+'_'+instrument+'_v_gse2000'+suffix, 'legend_names', ['Vx GSE2000', 'Vy GSE2000', 'Vz GSE2000'])
diff --git a/pyspedas/mms/mec_ascii/mms_get_local_state_files.py b/pyspedas/mms/mec_ascii/mms_get_local_state_files.py
index 893cca36..fd7a1f1c 100644
--- a/pyspedas/mms/mec_ascii/mms_get_local_state_files.py
+++ b/pyspedas/mms/mec_ascii/mms_get_local_state_files.py
@@ -1,13 +1,11 @@
import os
import fnmatch
import glob
-import re
import logging
import pandas as pd
-
-from pyspedas import time_double
from pyspedas.mms.mms_config import CONFIG
+
def mms_get_local_state_files(probe='1', level='def', filetype='eph', trange=None):
"""
Search for local state MMS files in case a list cannot be retrieved from the
@@ -15,7 +13,7 @@ def mms_get_local_state_files(probe='1', level='def', filetype='eph', trange=Non
Parameters:
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -46,23 +44,28 @@ def mms_get_local_state_files(probe='1', level='def', filetype='eph', trange=Non
# and FILETYPE is either DEFATT, PREDATT, DEFEPH, PREDEPH in uppercase
# and start/endDate is YYYYDOY
# and version is Vnn (.V00, .V01, etc..)
- dir_pattern = os.sep.join([CONFIG['local_data_dir'], 'ancillary', 'mms'+probe, level+filetype])
- file_pattern = 'MMS'+probe+'_'+level.upper()+filetype.upper()+'_'+'???????_???????.V??'
+ dir_pattern = os.sep.join([CONFIG['local_data_dir'], 'ancillary', f'mms{probe}', f'{level}{filetype}'])
+ file_pattern = f'MMS{probe}_{level.upper()}{filetype.upper()}_???????_???????.V??'
files_in_trange = []
out_files = []
files = glob.glob(os.sep.join([dir_pattern, file_pattern]))
- # find the files within the trange
- file_regex = re.compile(os.sep.join([dir_pattern, 'MMS'+probe+'_'+level.upper()+filetype.upper()+'_([0-9]{7})_([0-9]{7}).V[0-9]{2}']))
for file in files:
- time_match = file_regex.match(file)
- if time_match != None:
- start_time = pd.to_datetime(time_match.group(1), format='%Y%j').timestamp()
- end_time = pd.to_datetime(time_match.group(2), format='%Y%j').timestamp()
- if start_time < time_double(trange[1]) and end_time >= time_double(trange[0]):
+ filename = os.path.basename(file)
+ try:
+ date_parts = filename.split('_')
+ start_time_str = date_parts[2]
+ end_time_str = date_parts[3].split('.')[0]
+
+ start_time = pd.to_datetime(start_time_str, format='%Y%j').timestamp()
+ end_time = pd.to_datetime(end_time_str, format='%Y%j').timestamp()
+
+ if start_time < pd.Timestamp(trange[1]).timestamp() and end_time >= pd.Timestamp(trange[0]).timestamp():
files_in_trange.append(file)
+        except (IndexError, ValueError):  # malformed file name or unparsable date
+            continue
# ensure only the latest version of each file is loaded
for file in files_in_trange:
@@ -73,4 +76,4 @@ def mms_get_local_state_files(probe='1', level='def', filetype='eph', trange=Non
else:
out_files.append(versions[0])
- return list(set(out_files))
\ No newline at end of file
+ return list(set(out_files))
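The new split-based parsing can be checked in isolation; a sketch with a
hypothetical ancillary file name in the MMS<probe>_<LEVEL><TYPE>_<YYYYDOY>_<YYYYDOY>.Vnn
format described above:

    import pandas as pd

    filename = 'MMS1_DEFEPH_2015289_2015290.V01'  # hypothetical example

    date_parts = filename.split('_')
    start_time_str = date_parts[2]              # '2015289' (YYYYDOY)
    end_time_str = date_parts[3].split('.')[0]  # '2015290'

    start_time = pd.to_datetime(start_time_str, format='%Y%j').timestamp()
    end_time = pd.to_datetime(end_time_str, format='%Y%j').timestamp()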
diff --git a/pyspedas/mms/mec_ascii/mms_get_state_data.py b/pyspedas/mms/mec_ascii/mms_get_state_data.py
index dc534ad5..8aa8834d 100644
--- a/pyspedas/mms/mec_ascii/mms_get_state_data.py
+++ b/pyspedas/mms/mec_ascii/mms_get_state_data.py
@@ -1,10 +1,8 @@
import os
import logging
import warnings
-
from shutil import copyfileobj, copy
from tempfile import NamedTemporaryFile
-
from pyspedas import time_double, time_string
from pyspedas.mms.mms_login_lasp import mms_login_lasp
from pyspedas.mms.mms_config import CONFIG
@@ -12,12 +10,12 @@
from pyspedas.mms.mec_ascii.mms_load_eph_tplot import mms_load_eph_tplot
from pyspedas.mms.mec_ascii.mms_load_att_tplot import mms_load_att_tplot
+
def mms_get_state_data(probe='1', trange=['2015-10-16', '2015-10-17'],
datatypes=['pos', 'vel'], level='def', no_download=False, pred_or_def=True,
suffix='', always_prompt=False):
"""
Helper routine for loading state data (ASCII files from the SDC); not meant to be called directly; see pyspedas.mms.state instead
-
"""
if not isinstance(probe, list): probe = [probe]
@@ -25,7 +23,6 @@ def mms_get_state_data(probe='1', trange=['2015-10-16', '2015-10-17'],
local_data_dir = CONFIG['local_data_dir']
download_only = CONFIG['download_only']
-
start_time = time_double(trange[0])-60*60*24.
end_time = time_double(trange[1])
@@ -136,7 +133,7 @@ def mms_get_state_data(probe='1', trange=['2015-10-16', '2015-10-17'],
continue
# if no files are found remotely, try locally
- if out_files == []:
+ if not out_files:
out_files = mms_get_local_state_files(probe=probe_id, level=level, filetype=filetype, trange=[start_time_str, end_time_str])
if filetype == 'eph':
diff --git a/pyspedas/mms/mec_ascii/mms_load_att_tplot.py b/pyspedas/mms/mec_ascii/mms_load_att_tplot.py
index 6ae79905..ae0b829c 100644
--- a/pyspedas/mms/mec_ascii/mms_load_att_tplot.py
+++ b/pyspedas/mms/mec_ascii/mms_load_att_tplot.py
@@ -1,15 +1,13 @@
-
import logging
import pandas as pd
import numpy as np
-
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import store_data
+
def mms_load_att_tplot(filenames, level='def', probe='1', datatypes=['spinras', 'spindec'], suffix='', trange=None):
"""
Helper routine for loading state data (ASCII files from the SDC); not meant to be called directly; see pyspedas.mms.state instead
-
"""
prefix = 'mms' + probe
@@ -61,4 +59,3 @@ def mms_load_att_tplot(filenames, level='def', probe='1', datatypes=['spinras',
if 'spindec' in datatypes:
store_data(prefix + '_' + level + 'att_spindec' + suffix, data={'x': file_times_uniq[0], 'y': file_ldecs_out})
tclip(prefix + '_' + level + 'att_spindec' + suffix, trange[0], trange[1], suffix='')
-
diff --git a/pyspedas/mms/mec_ascii/mms_load_eph_tplot.py b/pyspedas/mms/mec_ascii/mms_load_eph_tplot.py
index d00a34c1..1a19a0ac 100644
--- a/pyspedas/mms/mec_ascii/mms_load_eph_tplot.py
+++ b/pyspedas/mms/mec_ascii/mms_load_eph_tplot.py
@@ -1,15 +1,13 @@
-
import logging
import pandas as pd
import numpy as np
-
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import store_data, options
+
def mms_load_eph_tplot(filenames, level='def', probe='1', datatypes=['pos', 'vel'], suffix='', trange=None):
"""
Helper routine for loading state data (ASCII files from the SDC); not meant to be called directly; see pyspedas.mms.state instead
-
"""
prefix = 'mms' + probe
@@ -45,7 +43,6 @@ def mms_load_eph_tplot(filenames, level='def', probe='1', datatypes=['pos', 'vel
options(prefix + '_' + level + 'eph_pos' + suffix, 'ytitle', 'MMS'+str(probe)+' position')
options(prefix + '_' + level + 'eph_pos' + suffix, 'ysubtitle', '[km]')
options(prefix + '_' + level + 'eph_pos' + suffix, 'legend_names', ['X ECI', 'Y ECI', 'Z ECI'])
- options(prefix + '_' + level + 'eph_pos' + suffix, 'color', ['b', 'g', 'r'])
if 'vel' in datatypes:
store_data(prefix + '_' + level + 'eph_vel' + suffix, data={'x': time_values, 'y': np.transpose(np.array([vx_values, vy_values, vz_values]))})
@@ -53,5 +50,3 @@ def mms_load_eph_tplot(filenames, level='def', probe='1', datatypes=['pos', 'vel
options(prefix + '_' + level + 'eph_vel' + suffix, 'ytitle', 'MMS'+str(probe)+' velocity')
options(prefix + '_' + level + 'eph_vel' + suffix, 'ysubtitle', '[km/s]')
options(prefix + '_' + level + 'eph_vel' + suffix, 'legend_names', ['Vx ECI', 'Vy ECI', 'Vz ECI'])
- options(prefix + '_' + level + 'eph_vel' + suffix, 'color', ['b', 'g', 'r'])
-
diff --git a/pyspedas/mms/mec_ascii/state.py b/pyspedas/mms/mec_ascii/state.py
index 19231467..0a89dbd9 100644
--- a/pyspedas/mms/mec_ascii/state.py
+++ b/pyspedas/mms/mec_ascii/state.py
@@ -1,7 +1,7 @@
-
from pyspedas.mms.mec_ascii.mms_get_state_data import mms_get_state_data
from pyspedas.mms.print_vars import print_vars
+
@print_vars
def mms_load_state(trange=['2015-10-16', '2015-10-17'], probe='1', level='def',
datatypes=['pos', 'vel'], no_update=False, pred_or_def=True, suffix=''):
@@ -12,7 +12,7 @@ def mms_load_state(trange=['2015-10-16', '2015-10-17'], probe='1', level='def',
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -33,7 +33,11 @@ def mms_load_state(trange=['2015-10-16', '2015-10-17'], probe='1', level='def',
Set this flag to preserve the original data. if not set and newer
data is found the existing data will be overwritten
- Returns:
+ pred_or_def: bool
+ Load definitive or predicted (if definitive isn't available); defaults to True
+
+ Returns
+ --------
List of tplot variables created.
"""
diff --git a/pyspedas/mms/mms_events.py b/pyspedas/mms/mms_events.py
new file mode 100644
index 00000000..f9e80b02
--- /dev/null
+++ b/pyspedas/mms/mms_events.py
@@ -0,0 +1,73 @@
+import os
+import logging
+import numpy as np
+import pandas as pd
+from pyspedas import time_string, time_double
+from pyspedas.mms.mms_tai2unix import mms_tai2unix
+from pyspedas.utilities.download import download
+from pyspedas.mms.mms_config import CONFIG
+
+
+def mms_brst_events(trange=None, reload=False):
+ """
+ Prints a list of burst mode segment selections from the MMS data segment database
+
+ Parameters
+ -----------
+ trange: list of str or list of float
+ Time range to list burst mode events
+
+ reload:
+ Re-download the burst mode events database
+ """
+ if trange is None:
+ logging.error('Time range not specified. ')
+ return
+
+ remote_path = 'https://lasp.colorado.edu/mms/sdc/public/service/latis/'
+ remote_file = 'mms_burst_data_segment.csv'
+
+ if reload or not os.path.exists(os.path.join(CONFIG['local_data_dir'], remote_file)):
+ brst_file = download(remote_path=remote_path, remote_file=remote_file, local_path=CONFIG['local_data_dir'])
+ if len(brst_file) > 0:
+ brst_file = brst_file[0]
+ else:
+ brst_file = os.path.join(CONFIG['local_data_dir'], remote_file)
+
+ table = load_csv_file(brst_file)
+
+ descriptions = table['DISCUSSION'].to_numpy()
+ authors = table['SOURCEID'].to_numpy()
+ start_tai = np.float64(table['TAISTARTTIME'].to_numpy())
+ end_tai = np.float64(table['TAIENDTIME'].to_numpy())
+
+ start_unix = mms_tai2unix(start_tai)
+ end_unix = mms_tai2unix(end_tai)
+
+ # sort based on start time
+ sorted_indices = np.argsort(start_unix)
+ descriptions = descriptions[sorted_indices]
+ authors = authors[sorted_indices]
+ start_unix = start_unix[sorted_indices]
+ end_unix = end_unix[sorted_indices]
+
+ trange = time_double(trange)
+ idxs = ((start_unix >= trange[0]) & (start_unix <= trange[1])) & ((end_unix <= trange[1]) & (end_unix >= trange[0]))
+ indices = np.argwhere(idxs).flatten()
+ descriptions = descriptions[indices]
+ authors = authors[indices]
+ start_times = start_unix[indices]
+ end_times = end_unix[indices]
+
+ for desc, author, start_time, end_time in zip(descriptions, authors, start_times, end_times):
+ print(time_string(start_time, fmt='%Y-%m-%d/%H:%M:%S') + ' - ' + time_string(end_time, fmt='%Y-%m-%d/%H:%M:%S') + ': ' + str(desc) + ' (' + str(author) + ')')
+
+
+def load_csv_file(filename, cols=None):
+ """
+ Loads the burst segment CSV file into a pandas data frame
+ """
+ if cols is None:
+ cols = ['DATASEGMENTID', 'TAISTARTTIME', 'TAIENDTIME', 'PARAMETERSETID', 'FOM', 'ISPENDING', 'INPLAYLIST', 'STATUS', 'NUMEVALCYCLES', 'SOURCEID', 'CREATETIME', 'FINISHTIME', 'OBS1NUMBUFS', 'OBS2NUMBUFS', 'OBS3NUMBUFS', 'OBS4NUMBUFS', 'OBS1ALLOCBUFS', 'OBS2ALLOCBUFS', 'OBS3ALLOCBUFS', 'OBS4ALLOCBUFS', 'OBS1REMFILES', 'OBS2REMFILES', 'OBS3REMFILES', 'OBS4REMFILES', 'DISCUSSION', 'empty1', 'empty2']
+ df = pd.read_csv(filename, dtype=str, names=cols, on_bad_lines='skip', skiprows=1)
+ return df
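A minimal usage sketch for the new mms_brst_events routine; it prints one line
per burst selection, with description and author:

    from pyspedas.mms.mms_events import mms_brst_events

    mms_brst_events(trange=['2015-10-16', '2015-10-17'])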
diff --git a/pyspedas/mms/mms_file_filter.py b/pyspedas/mms/mms_file_filter.py
index 23e98615..d3a21356 100644
--- a/pyspedas/mms/mms_file_filter.py
+++ b/pyspedas/mms/mms_file_filter.py
@@ -1,11 +1,12 @@
-
import re
+
def mms_file_filter(files, latest_version=False, major_version=False, min_version=None, version=None):
"""
This function filters a list of MMS data files based on CDF version
- Parameters:
+ Parameters
+ ------------
version: str
Specify a specific CDF version # to return (e.g., cdf_version='4.3.0')
@@ -18,7 +19,8 @@ def mms_file_filter(files, latest_version=False, major_version=False, min_versio
major_version: bool
Only return the latest major CDF version (e.g., X in vX.Y.Z) in the requested time interval
- Returns:
+ Returns
+ ----------
List of filtered files
"""
@@ -81,4 +83,4 @@ def mms_file_filter(files, latest_version=False, major_version=False, min_versio
else:
out_files.append(file_ver[3])
- return out_files
\ No newline at end of file
+ return out_files
diff --git a/pyspedas/mms/mms_files_in_interval.py b/pyspedas/mms/mms_files_in_interval.py
index 93642d21..69ba9ac9 100644
--- a/pyspedas/mms/mms_files_in_interval.py
+++ b/pyspedas/mms/mms_files_in_interval.py
@@ -1,26 +1,27 @@
-
import re
from dateutil.parser import parse
from bisect import bisect_left
+
def mms_files_in_interval(in_files, trange):
- '''
- This function filters the file list returned by the SDC to the requested time range. This filter is purposefully
+ """
+ This function filters the file list returned by the SDC to the requested time range. This filter is purposefully
liberal, it regularly grabs an extra file due to special cases
- Parameters:
+ Parameters
+ -----------
in_files: list of dict
List of hash tables containing files returned by the SDC
trange : list of str
- time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ time range of interest [start time, end time] with the format
+ 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
- Returns:
+ Returns
+ ---------
List of hash tables containing file names, sizes and their time tags
-
- '''
+ """
file_name = 'mms.*_([0-9]{8,14})_v(\d+).(\d+).(\d+).cdf'
file_times = []
diff --git a/pyspedas/mms/mms_get_local_files.py b/pyspedas/mms/mms_get_local_files.py
index 0d6bc20d..f5237344 100644
--- a/pyspedas/mms/mms_get_local_files.py
+++ b/pyspedas/mms/mms_get_local_files.py
@@ -1,21 +1,21 @@
+import logging
import os
import re
import shutil
from .mms_config import CONFIG
from .mms_files_in_interval import mms_files_in_interval
-
from dateutil.rrule import rrule, DAILY
from dateutil.parser import parse
-
from datetime import timedelta
-def mms_get_local_files(probe, instrument, data_rate, level, datatype, trange, mirror=False):
+def mms_get_local_files(probe, instrument, data_rate, level, datatype, trange, mirror=False):
"""
Search for local MMS files in case a list cannot be retrieved from the
remote server.
- Parameters:
+ Parameters
+ ------------
probe: str
probe #, e.g., '4' for MMS4
@@ -37,10 +37,10 @@ def mms_get_local_files(probe, instrument, data_rate, level, datatype, trange, m
mirror: bool
if True, copy files from network mirror to local data directory
- Returns:
+ Returns
+ ---------
List of file paths.
"""
-
files_out = []
if mirror:
@@ -51,7 +51,6 @@ def mms_get_local_files(probe, instrument, data_rate, level, datatype, trange, m
else:
data_dir = CONFIG['local_data_dir']
-
# directory and file name search patterns
# -assume directories are of the form:
# (srvy, SITL): spacecraft/instrument/rate/level[/datatype]/year/month/
@@ -79,18 +78,22 @@ def mms_get_local_files(probe, instrument, data_rate, level, datatype, trange, m
else:
full_path = os.sep.join([re.escape(local_dir), file_name])
+ # check for extra /'s in the path
+ if '//' in full_path:
+ full_path = full_path.replace('//', '/')
+
regex = re.compile(full_path)
for root, dirs, files in os.walk(data_dir):
for file in files:
this_file = os.sep.join([root, file])
- if CONFIG['debug_mode']: print('Checking ' + this_file)
- if CONFIG['debug_mode']: print('against: ' + full_path)
+ if CONFIG['debug_mode']: logging.info('Checking ' + this_file)
+ if CONFIG['debug_mode']: logging.info('against: ' + full_path)
matches = regex.match(this_file)
if matches:
this_time = parse(matches.groups()[1])
if this_time >= parse(parse(trange[0]).strftime('%Y-%m-%d')) and this_time <= parse(trange[1])-timedelta(seconds=1):
- if this_file not in files_out:
+ if not any(this_file == f['full_name'] for f in files_out):
files_out.append({'file_name': file, 'timetag': '', 'full_name': this_file, 'file_size': ''})
files_in_interval = mms_files_in_interval(files_out, trange)
@@ -110,12 +113,13 @@ def mms_get_local_files(probe, instrument, data_rate, level, datatype, trange, m
# need to copy files from network mirror to local data directory
for file in local_files:
local_file = file.replace(mirror_dir, local_dir)
- if CONFIG['debug_mode']: print('Copying ' + file + ' to ' + local_file)
+ if CONFIG['debug_mode']:
+ logging.info('Copying ' + file + ' to ' + local_file)
shutil.copyfile(file, local_file)
local_files_copied.append(local_file)
local_files = local_files_copied
for file in local_files:
- print('Loading: ' + file)
+ logging.info('Loading: ' + file)
- return local_files
\ No newline at end of file
+ return local_files
diff --git a/pyspedas/mms/mms_load_brst_segments.py b/pyspedas/mms/mms_load_brst_segments.py
index 51a528f1..b212aa16 100644
--- a/pyspedas/mms/mms_load_brst_segments.py
+++ b/pyspedas/mms/mms_load_brst_segments.py
@@ -1,32 +1,36 @@
-
import os
import logging
import numpy as np
from scipy.io import readsav
from pytplot import store_data, options
-
from pyspedas import time_double
from pyspedas.utilities.download import download
from pyspedas.mms.mms_config import CONFIG
-
-logging.captureWarnings(True)
-logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+from pyspedas.mms.mms_update_brst_intervals import mms_update_brst_intervals
-def mms_load_brst_segments(trange=None, suffix=''):
- '''
+def mms_load_brst_segments(trange=None, suffix='', sdc=True):
+ """
This function loads the burst segment intervals
- Parameters:
+ Parameters
+ -----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
- Returns:
- Tuple containing (start_times, end_times)
+ suffix: str
+ String to append to the end of the tplot variable names
+
+ sdc: bool
+ Flag to download the data from the SDC instead of spedas.org
+ (spedas.org is out of date, but faster); defaults to True
- '''
+ Returns
+ ---------
+ Tuple containing (start_times, end_times)
+ """
if trange is None:
logging.error('Error; no trange specified.')
@@ -34,22 +38,33 @@ def mms_load_brst_segments(trange=None, suffix=''):
tr = time_double(trange)
- save_file = os.path.join(CONFIG['local_data_dir'], 'mms_brst_intervals.sav')
- brst_file = download(remote_file='http://www.spedas.org/mms/mms_brst_intervals.sav',
- local_file=save_file)
+ if not sdc:
+ save_file = os.path.join(CONFIG['local_data_dir'], 'mms_brst_intervals.sav')
+ brst_file = download(remote_file='http://www.spedas.org/mms/mms_brst_intervals.sav',
+ local_file=save_file)
- if len(brst_file) == 0:
- logging.error('Error downloading burst intervals sav file')
- return None
+ if len(brst_file) == 0:
+ logging.error('Error downloading burst intervals sav file')
+ return None
- try:
- intervals = readsav(save_file)
- except FileNotFoundError:
- logging.error('Error loading burst intervals sav file: ' + save_file)
- return None
+ try:
+ intervals = readsav(save_file)
+ except FileNotFoundError:
+ logging.error('Error loading burst intervals sav file: ' + save_file)
+ return None
+
+ unix_start = intervals['brst_intervals'].start_times[0]
+ unix_end = intervals['brst_intervals'].end_times[0]
- unix_start = intervals['brst_intervals'].start_times[0]
- unix_end = intervals['brst_intervals'].end_times[0]
+ else:
+ intervals = mms_update_brst_intervals()
+
+ if intervals is not None:
+ unix_start = np.array(intervals['start_times'])
+ unix_end = np.array(intervals['end_times'])
+ else:
+ logging.error('Error downloading latest burst intervals file.')
+ return
sorted_idxs = np.argsort(unix_start)
unix_start = unix_start[sorted_idxs]
@@ -60,9 +75,13 @@ def mms_load_brst_segments(trange=None, suffix=''):
unix_start = unix_start[times_in_range]
unix_end = unix_end[times_in_range]
+ if len(unix_start) == 0:
+ logging.error('No burst intervals found in the time range.')
+ return
+
# +10 second offset added; there appears to be an extra 10
# seconds of data, consistently, not included in the range here
- unix_end = [end_time+10.0 for end_time in unix_end]
+ unix_end = np.array([end_time+10.0 for end_time in unix_end])
bar_x = []
bar_y = []
@@ -86,4 +105,4 @@ def mms_load_brst_segments(trange=None, suffix=''):
options('mms_bss_burst'+suffix, 'legend_names', ['Burst'])
options('mms_bss_burst'+suffix, 'ytitle', '')
- return (unix_start, unix_end)
+ return unix_start, unix_end
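
For context on the new keyword, a minimal usage sketch of the updated loader (the time range is illustrative, not part of the patch):

    from pyspedas.mms.mms_load_brst_segments import mms_load_brst_segments

    # load burst intervals from the SDC (the new default path)
    result = mms_load_brst_segments(trange=['2015-10-16', '2015-10-17'])
    if result is not None:
        starts, ends = result  # sorted unix times, clipped to trange

    # fall back to the (stale, but faster) spedas.org .sav file
    result = mms_load_brst_segments(trange=['2015-10-16', '2015-10-17'], sdc=False)
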
diff --git a/pyspedas/mms/mms_load_data.py b/pyspedas/mms/mms_load_data.py
index 7aa374d9..67739ab7 100644
--- a/pyspedas/mms/mms_load_data.py
+++ b/pyspedas/mms/mms_load_data.py
@@ -5,7 +5,7 @@
import pkg_resources
import numpy as np
from pytplot import cdf_to_tplot
-from ..analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pyspedas import time_double, time_string
from dateutil.parser import parse
from datetime import timedelta, datetime
@@ -18,8 +18,6 @@
from .mms_file_filter import mms_file_filter
from .mms_load_data_spdf import mms_load_data_spdf
-logging.captureWarnings(True)
-logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy', level='l2',
instrument='fgm', datatype='', varformat=None, prefix='', suffix='', get_support_data=False, time_clip=False,
@@ -28,10 +26,8 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
"""
This function loads MMS data into pyTplot variables
- This function is not meant to be called directly. Please see the individual load routines for documentation and use.
-
+ This function is not meant to be called directly. Please see the individual load routines for documentation and use.
"""
-
if not isinstance(probe, list): probe = [probe]
if not isinstance(data_rate, list): data_rate = [data_rate]
if not isinstance(level, list): level = [level]
@@ -48,7 +44,6 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
trange[0] = time_string(trange[0])
if isinstance(trange[1], float):
trange[1] = time_string(trange[1])
-
download_only = CONFIG['download_only']
@@ -93,6 +88,9 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
for lvl in level:
for dtype in datatype:
+
+ file_found = False
+
if user is None:
url = 'https://lasp.colorado.edu/mms/sdc/public/files/api/v1/file_info/science?start_date=' + start_date + '&end_date=' + end_date + '&sc_id=mms' + prb + '&instrument_id=' + instrument + '&data_rate_mode=' + drate + '&data_level=' + lvl
else:
@@ -103,7 +101,7 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
if CONFIG['debug_mode']: logging.info('Fetching: ' + url)
- if no_download == False:
+ if not no_download:
# query list of available files
try:
with warnings.catch_warnings():
@@ -137,6 +135,7 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
if os.path.exists(out_file) and str(os.stat(out_file).st_size) == str(file['file_size']):
if not download_only: logging.info('Loading ' + out_file)
out_files.append(out_file)
+ file_found = True
continue
if user is None:
@@ -160,23 +159,27 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
# if the download was successful, copy to data directory
copy(ftmp.name, out_file)
out_files.append(out_file)
+ file_found = True
fsrc.close()
ftmp.close()
+ os.unlink(ftmp.name) # delete the temporary file
except requests.exceptions.ConnectionError:
# No/bad internet connection; try loading the files locally
logging.error('No internet connection!')
-
- if out_files == []:
- if not download_only: logging.info('Searching for local files...')
- out_files = mms_get_local_files(prb, instrument, drate, lvl, dtype, trange)
+ if not file_found:
+ added_local_files = False
+ if not download_only:
+ logging.info('Searching for local files...')
+ out_files.extend(mms_get_local_files(prb, instrument, drate, lvl, dtype, trange))
+ added_local_files = True
- if out_files == [] and CONFIG['mirror_data_dir'] != None:
+ if added_local_files and CONFIG['mirror_data_dir'] is not None:
# check for network mirror; note: network mirrors are assumed to be read-only
# and we always copy the files from the mirror to the local data directory
# before trying to load into tplot variables
logging.info('No local files found; checking network mirror...')
- out_files = mms_get_local_files(prb, instrument, drate, lvl, dtype, trange, mirror=True)
+ out_files.extend(mms_get_local_files(prb, instrument, drate, lvl, dtype, trange, mirror=True))
if not no_download:
sdc_session.close()
@@ -188,7 +191,7 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
out_files = sorted(out_files)
filtered_out_files = mms_file_filter(out_files, latest_version=latest_version, major_version=major_version, min_version=min_version, version=cdf_version)
- if filtered_out_files == []:
+ if not filtered_out_files:
logging.info('No matching CDF versions found.')
return
@@ -197,7 +200,7 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
if notplot:
return new_variables
- if new_variables == []:
+ if not new_variables:
logging.warning('No data loaded.')
return
@@ -208,8 +211,3 @@ def mms_load_data(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srv
return new_variables
else:
return out_files
-
-
-
-
-
diff --git a/pyspedas/mms/mms_load_data_spdf.py b/pyspedas/mms/mms_load_data_spdf.py
index a5cd781d..e905f0c8 100644
--- a/pyspedas/mms/mms_load_data_spdf.py
+++ b/pyspedas/mms/mms_load_data_spdf.py
@@ -1,14 +1,16 @@
+import logging
from pyspedas import time_double
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
from .mms_file_filter import mms_file_filter
+from .mms_get_local_files import mms_get_local_files
from pytplot import cdf_to_tplot
-from pyspedas.analysis.time_clip import time_clip as tclip
-
+from pytplot import time_clip as tclip
from .mms_config import CONFIG
CONFIG['remote_data_dir'] = 'https://spdf.gsfc.nasa.gov/pub/data/mms/'
+
def mms_load_data_spdf(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy', level='l2',
instrument='fgm', datatype='', varformat=None, suffix='', get_support_data=False, time_clip=False,
no_update=False, center_measurement=False, available=False, notplot=False, latest_version=False,
@@ -16,8 +18,7 @@ def mms_load_data_spdf(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate
"""
This function loads MMS data from NASA SPDF into pyTplot variables
- This function is not meant to be called directly. Please see the individual load routines for documentation and use.
-
+ This function is not meant to be called directly. Please see the individual load routines for documentation and use.
"""
tvars_created = []
@@ -27,7 +28,6 @@ def mms_load_data_spdf(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate
if not isinstance(level, list): level = [level]
if not isinstance(datatype, list): datatype = [datatype]
-
for prb in probe:
for lvl in level:
for drate in data_rate:
@@ -82,7 +82,18 @@ def mms_load_data_spdf(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate
if files is not None:
for file in files:
out_files.append(file)
-
+
+ if not out_files:
+ logging.info('Searching for local files...')
+ out_files = mms_get_local_files(prb, instrument, drate, lvl, dtype, trange)
+
+ if not out_files and CONFIG['mirror_data_dir'] is not None:
+ # check for network mirror; note: network mirrors are assumed to be read-only
+ # and we always copy the files from the mirror to the local data directory
+ # before trying to load into tplot variables
+ logging.info('No local files found; checking network mirror...')
+ out_files = mms_get_local_files(prb, instrument, drate, lvl, dtype, trange, mirror=True)
+
out_files = sorted(out_files)
filtered_out_files = mms_file_filter(out_files, latest_version=latest_version, major_version=major_version, min_version=min_version, version=cdf_version)
diff --git a/pyspedas/mms/mms_load_fast_segments.py b/pyspedas/mms/mms_load_fast_segments.py
index 4f3dd272..6d287eb7 100644
--- a/pyspedas/mms/mms_load_fast_segments.py
+++ b/pyspedas/mms/mms_load_fast_segments.py
@@ -1,36 +1,32 @@
-
import os
import logging
import numpy as np
from scipy.io import readsav
from pytplot import store_data, options
-
from pyspedas import time_double
from pyspedas.utilities.download import download
from pyspedas.mms.mms_config import CONFIG
-logging.captureWarnings(True)
-logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
-
def mms_load_fast_segments(trange=None, suffix=''):
- '''
+ """
This function loads the fast segment intervals
- Parameters:
+ Parameters
+ -----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
suffix: str
Suffix to append to the end of the tplot variables
- Returns:
+ Returns
+ --------
Tuple containing (start_times, end_times)
- '''
-
+ """
if trange is None:
logging.error('Error; no trange specified.')
return None
@@ -81,4 +77,4 @@ def mms_load_fast_segments(trange=None, suffix=''):
options('mms_bss_fast'+suffix, 'legend_names', ['Fast'])
options('mms_bss_fast'+suffix, 'ytitle', '')
- return (unix_start, unix_end)
+ return unix_start, unix_end
diff --git a/pyspedas/mms/mms_load_sroi_segments.py b/pyspedas/mms/mms_load_sroi_segments.py
index 666d34d1..d02d9991 100644
--- a/pyspedas/mms/mms_load_sroi_segments.py
+++ b/pyspedas/mms/mms_load_sroi_segments.py
@@ -1,4 +1,3 @@
-
import csv
import logging
import requests
@@ -6,8 +5,6 @@
from pytplot import store_data, options
from pyspedas import time_double, time_string
-logging.captureWarnings(True)
-logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
def get_mms_srois(start_time=None, end_time=None, sc_id=None):
if start_time is None:
@@ -65,7 +62,8 @@ def mms_load_sroi_segments(trange=None, probe=1, suffix=''):
"""
This function loads the Science Region of Interest (SRoI) segment intervals
- Parameters:
+ Parameters
+ ------------
trange: list of str
time range of interest [starttime, endtime] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
@@ -77,11 +75,11 @@ def mms_load_sroi_segments(trange=None, probe=1, suffix=''):
suffix: str
Suffix to append to the end of the tplot variables
- Returns:
+ Returns
+ ---------
Tuple containing (start_times, end_times)
"""
-
if not isinstance(probe, str):
probe = str(probe)
@@ -130,4 +128,4 @@ def mms_load_sroi_segments(trange=None, probe=1, suffix=''):
options('mms' + probe + '_bss_sroi'+suffix, 'legend_names', ['Fast'])
options('mms' + probe + '_bss_sroi'+suffix, 'ytitle', '')
- return (start_out, end_out)
+ return start_out, end_out
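
The SRoI loader follows the same tuple-return convention; a short sketch (a plausible day-long range, chosen for illustration only):

    from pyspedas.mms.mms_load_sroi_segments import mms_load_sroi_segments

    segs = mms_load_sroi_segments(trange=['2019-09-30', '2019-10-01'], probe=1)
    if segs is not None:
        start_out, end_out = segs  # also creates the 'mms1_bss_sroi' bar variable
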
diff --git a/pyspedas/mms/mms_login_lasp.py b/pyspedas/mms/mms_login_lasp.py
index 677d45d2..4d020931 100644
--- a/pyspedas/mms/mms_login_lasp.py
+++ b/pyspedas/mms/mms_login_lasp.py
@@ -1,4 +1,3 @@
-
from getpass import getpass
from scipy.io import readsav
import requests
@@ -7,14 +6,11 @@
import logging
import warnings
-logging.captureWarnings(True)
-logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
def mms_login_lasp(always_prompt=False, headers={}):
- '''
+ """
This function logs the user into the SDC and returns a tuple with: (requests.Session object, username)
- '''
-
+ """
homedir = os.path.expanduser('~')
user_input_passwd = False
saved_auth = None
@@ -36,7 +32,12 @@ def mms_login_lasp(always_prompt=False, headers={}):
pass
if saved_auth is None or always_prompt == True:
- user = input('SDC username (blank for public access): ')
+ try:
+ user = input('SDC username (blank for public access): ')
+ except:
+ logging.error('Error while reading SDC username/password; defaulting to public user...')
+ user = ''
+
if user != '':
passwd = getpass()
else: passwd = ''
@@ -57,7 +58,7 @@ def mms_login_lasp(always_prompt=False, headers={}):
auth = session.post('https://lasp.colorado.edu', verify=True, timeout=5, headers=headers)
testget = session.get('https://lasp.colorado.edu/mms/sdc/sitl/files/api/v1/download/science', verify=True, timeout=5, headers=headers)
except:
- return (session, None)
+ return session, None
# check if the login failed
if testget.status_code == 401:
@@ -71,4 +72,4 @@ def mms_login_lasp(always_prompt=False, headers={}):
if user == '':
user = None
- return (session, user)
+ return session, user
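
With the parenthesized returns dropped, callers can unpack the session directly; a minimal sketch:

    from pyspedas.mms.mms_login_lasp import mms_login_lasp

    session, user = mms_login_lasp()  # user is None for public access
    # session is a requests.Session reusable for authenticated SDC requests
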
diff --git a/pyspedas/mms/mms_orbit_plot.py b/pyspedas/mms/mms_orbit_plot.py
index c930bb78..a86a9682 100644
--- a/pyspedas/mms/mms_orbit_plot.py
+++ b/pyspedas/mms/mms_orbit_plot.py
@@ -1,20 +1,43 @@
-
+import logging
import os
import matplotlib.pyplot as plt
from pytplot import get_data
from . import mms_load_mec
-def mms_orbit_plot(trange=['2015-10-16', '2015-10-17'], probes=[1, 2, 3, 4], data_rate='srvy', xr=None, yr=None, plane='xy', coord='gse'):
+
+def mms_orbit_plot(trange=['2015-10-16', '2015-10-17'],
+ probes=[1, 2, 3, 4],
+ data_rate='srvy',
+ xr=None,
+ yr=None,
+ plane='xy',
+ coord='gse',
+ xsize=5,
+ ysize=5,
+ marker='x',
+ markevery=10,
+ markersize=5,
+ earth=True,
+ dpi=300,
+ save_png='',
+ save_pdf='',
+ save_eps='',
+ save_jpeg='',
+ save_svg='',
+ return_plot_objects=False,
+ display=True
+ ):
"""
This function creates MMS orbit plots
- Parameters:
+ Parameters
+ -----------
trange : list of str
time range of interest [starttime, endtime] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
- probe: list of str
+ probes: list of str
probe #, e.g., '4' for MMS4
data_rate: str
@@ -32,59 +55,103 @@ def mms_orbit_plot(trange=['2015-10-16', '2015-10-17'], probes=[1, 2, 3, 4], dat
coord: str
coordinate system
+ xsize: float
+ size of the figure in the x-direction, in inches (default: 5)
+
+ ysize: float
+ size of the figure in the y-direction, in inches (default: 5)
+
+ marker: str
+ marker style for the data points (default: 'x')
+
+ markevery: int or sequence of int
+ plot a marker at every n-th data point (default: 10)
+
+ markersize: float
+ size of the marker in points (default: 5)
+
+ earth: bool
+ plot a reference image of the Earth (default: True)
+
+ dpi: int
+ dots per inch for the plot (default: 300)
+
+ save_png: str
+ file path to save the plot as a PNG file (default: '')
+
+ save_pdf: str
+ file path to save the plot as a PDF file (default: '')
+
+ save_eps: str
+ file path to save the plot as an EPS file (default: '')
+
+ save_jpeg: str
+ file path to save the plot as a JPEG file (default: '')
+
+ save_svg: str
+ file path to save the plot as an SVG file (default: '')
+
+ return_plot_objects: bool
+ whether to return the plot objects as a tuple (default: False)
+
+ display: bool
+ whether to display the plot using matplotlib's `show()` function (default: True)
+
"""
- spacecraft_colors = [(0,0,0), (213/255,94/255,0), (0,158/255,115/255), (86/255,180/255,233/255)]
+ spacecraft_colors = [(0, 0, 0), (213/255, 94/255, 0), (0, 158/255, 115/255), (86/255, 180/255, 233/255)]
mec_vars = mms_load_mec(trange=trange, data_rate=data_rate, probe=probes, varformat='*_r_' + coord, time_clip=True)
if len(mec_vars) == 0:
- print('Problem loading MEC data')
+ logging.error('Problem loading MEC data')
return
plane = plane.lower()
coord = coord.lower()
if plane not in ['xy', 'yz', 'xz']:
- print('Error, invalid plane specified; valid options are: xy, yz, xz')
+ logging.error('Error, invalid plane specified; valid options are: xy, yz, xz')
return
if coord not in ['eci', 'gsm', 'geo', 'sm', 'gse', 'gse2000']:
- print('Error, invalid coordinate system specified; valid options are: eci, gsm, geo, sm, gse, gse2000')
+ logging.error('Error, invalid coordinate system specified; valid options are: eci, gsm, geo, sm, gse, gse2000')
return
km_in_re = 6371.2
- fig, axis = plt.subplots(sharey=True, sharex=True)
+ fig, axis = plt.subplots(sharey=True, sharex=True, figsize=(xsize, ysize))
+
+ if earth:
+ im = plt.imread(os.path.dirname(os.path.realpath(__file__)) + '/mec/earth_polar1.png')
+ plt.imshow(im, extent=(-1, 1, -1, 1))
- im = plt.imread(os.path.dirname(os.path.realpath(__file__)) + '/mec/earth_polar1.png')
- plt.imshow(im, extent=(-1, 1, -1, 1))
plot_count = 0
for probe in probes:
position_data = get_data('mms' + str(probe) + '_mec_r_' + coord)
if position_data is None:
- print('No ' + data_rate + ' MEC data found for ' + 'MMS' + str(probe))
+ logging.error('No ' + data_rate + ' MEC data found for ' + 'MMS' + str(probe))
continue
else:
t, d = position_data
plot_count += 1
if plane == 'xy':
- axis.plot(d[:, 0]/km_in_re, d[:, 1]/km_in_re, label='MMS' + str(probe), color=spacecraft_colors[int(probe)-1])
+ axis.plot(d[:, 0]/km_in_re, d[:, 1]/km_in_re, label='MMS' + str(probe), color=spacecraft_colors[int(probe)-1], marker=marker, markevery=markevery, markersize=markersize)
axis.set_xlabel('X Position, Re')
axis.set_ylabel('Y Position, Re')
if plane == 'yz':
- axis.plot(d[:, 1]/km_in_re, d[:, 2]/km_in_re, label='MMS' + str(probe), color=spacecraft_colors[int(probe)-1])
+ axis.plot(d[:, 1]/km_in_re, d[:, 2]/km_in_re, label='MMS' + str(probe), color=spacecraft_colors[int(probe)-1], marker=marker, markevery=markevery, markersize=markersize)
axis.set_xlabel('Y Position, Re')
axis.set_ylabel('Z Position, Re')
if plane == 'xz':
- axis.plot(d[:, 0]/km_in_re, d[:, 2]/km_in_re, label='MMS' + str(probe), color=spacecraft_colors[int(probe)-1])
+ axis.plot(d[:, 0]/km_in_re, d[:, 2]/km_in_re, label='MMS' + str(probe), color=spacecraft_colors[int(probe)-1], marker=marker, markevery=markevery, markersize=markersize)
axis.set_xlabel('X Position, Re')
axis.set_ylabel('Z Position, Re')
axis.set_aspect('equal')
- if plot_count > 0: # at least one plot created
+ if plot_count > 0: # at least one plot created
axis.legend()
axis.set_title(trange[0] + ' to ' + trange[1])
axis.annotate(coord.upper() + ' coordinates', xy=(0.6, 0.05), xycoords='axes fraction')
@@ -93,5 +160,23 @@ def mms_orbit_plot(trange=['2015-10-16', '2015-10-17'], probes=[1, 2, 3, 4], dat
if yr is not None:
axis.set_ylim(yr)
- plt.show()
+ if return_plot_objects:
+ return fig, axis
+
+ if save_png is not None and save_png != '':
+ plt.savefig(save_png + '.png', dpi=dpi)
+
+ if save_eps is not None and save_eps != '':
+ plt.savefig(save_eps + '.eps', dpi=dpi)
+
+ if save_svg is not None and save_svg != '':
+ plt.savefig(save_svg + '.svg', dpi=dpi)
+
+ if save_pdf is not None and save_pdf != '':
+ plt.savefig(save_pdf + '.pdf', dpi=dpi)
+
+ if save_jpeg is not None and save_jpeg != '':
+ plt.savefig(save_jpeg + '.jpeg', dpi=dpi)
+ if display:
+ plt.show()
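
A sketch exercising the new figure keywords (the file name is a placeholder; the routine appends '.png' itself):

    from pyspedas.mms.mms_orbit_plot import mms_orbit_plot

    # write a PNG without opening a window
    mms_orbit_plot(trange=['2015-10-16', '2015-10-17'], plane='xy',
                   save_png='mms_orbit_xy', dpi=300, display=False)

    # or grab the matplotlib objects for further customization
    fig, axis = mms_orbit_plot(trange=['2015-10-16', '2015-10-17'],
                               return_plot_objects=True)
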
diff --git a/pyspedas/mms/mms_tai2unix.py b/pyspedas/mms/mms_tai2unix.py
new file mode 100644
index 00000000..5b98f66b
--- /dev/null
+++ b/pyspedas/mms/mms_tai2unix.py
@@ -0,0 +1,37 @@
+import datetime
+import numpy as np
+from pyspedas.utilities.leap_seconds import load_leap_table
+
+
+def mms_tai2unix(values):
+ """
+ Converts MMS timestamps in TAI to unix timestamps
+
+ Based on Mitsuo Oka's IDL code with the same name.
+
+ Parameters
+ ----------
+ values: float, list of floats or np.ndarray
+ Time values in TAI
+
+ Returns
+ ---------
+ Array of time values as unix times
+
+ """
+ if not isinstance(values, list) and not isinstance(values, np.ndarray):
+ values = [values]
+ table = load_leap_table()
+ tai_minus_unix = 378691200.0
+ juls = np.array(table['juls'])
+ values_juls = np.array(values)/86400.0 + datetime.date(1958, 1, 1).toordinal() + 1721424.5
+ out = np.zeros(len(values))
+ for idx, value in enumerate(values_juls):
+ loc_greater = np.argwhere(value > juls).flatten()
+ if len(loc_greater) == 0:
+ continue
+ last_loc = loc_greater[len(loc_greater)-1]
+ current_leap = float(table['leaps'][last_loc])
+ tinput_1970 = values[idx] - tai_minus_unix
+ out[idx] = tinput_1970 - current_leap
+ return out
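
A quick numeric sketch of the conversion (input values are illustrative):

    import numpy as np
    from pyspedas.mms.mms_tai2unix import mms_tai2unix

    tai_times = np.array([1.0e9, 2.0e9])  # seconds since 1958-01-01 (TAI)
    unix_times = mms_tai2unix(tai_times)  # offset by 378691200 s, minus leap seconds
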
diff --git a/pyspedas/mms/mms_update_brst_intervals.py b/pyspedas/mms/mms_update_brst_intervals.py
new file mode 100644
index 00000000..a776a63e
--- /dev/null
+++ b/pyspedas/mms/mms_update_brst_intervals.py
@@ -0,0 +1,96 @@
+import os
+import csv
+import time
+import logging
+import pickle
+import numpy as np
+from pytplot import time_double, time_string
+from pyspedas.mms.mms_login_lasp import mms_login_lasp
+from pyspedas.utilities.download import download
+from pyspedas.mms.mms_config import CONFIG
+from pyspedas.mms.mms_tai2unix import mms_tai2unix
+
+
+def mms_update_brst_intervals():
+ """
+ This function downloads and caches the current mms_burst_data_segment.csv
+ file from the MMS SDC
+ """
+ # it's unclear whether logging in is still required for these downloads,
+ # so this code might be unnecessary now; for now it
+ # remains to match the IDL functionality
+ login = mms_login_lasp()
+
+ if login is None:
+ logging.error('Error logging into the LASP SDC.')
+ return
+
+ session, user = login
+
+ # grab ~6 months of burst intervals at a time
+ start_interval = '2015-03-01'
+ end_interval = time_double(start_interval) + 6.*30*24*60*60
+
+ unix_starts = []
+ unix_ends = []
+
+ while time_double(start_interval) <= time_double(time.time()):
+ start_str = time_string(time_double(start_interval), fmt='%d-%b-%Y')
+ end_str = time_string(end_interval, fmt='%d-%b-%Y')
+
+ logging.info(f'Downloading updates for {start_str} - {end_str}')
+
+ remote_path = 'https://lasp.colorado.edu/mms/sdc/public/service/latis/'
+ remote_file = f'mms_burst_data_segment.csv?FINISHTIME>={start_str}+&FINISHTIME<{end_str}'
+
+ brst_file = download(remote_path=remote_path, remote_file=remote_file,
+ local_file=os.path.join(CONFIG['local_data_dir'], 'mms_burst_data_segment.csv'),
+ session=session, no_wildcards=True)
+
+ if isinstance(brst_file, list):
+ # should only be one file
+ brst_file = brst_file[0]
+
+ times = load_csv_file(brst_file)
+ if not isinstance(times, tuple) or len(times) != 3:
+ logging.error('Error loading the CSV file')
+ return
+
+ taistarttime, taiendtime, status = times
+
+ complete_idxs = np.argwhere(status == 'COMPLETE+FINISHED').flatten()
+ if len(complete_idxs) != 0:
+ tai_starts = taistarttime[complete_idxs]
+ tai_ends = taiendtime[complete_idxs]
+
+ unix_starts.extend(mms_tai2unix(tai_starts))
+ unix_ends.extend(mms_tai2unix(tai_ends))
+
+ logging.info(f'Done grabbing updates for {start_str}-{end_str}')
+
+ start_interval = end_interval
+ end_interval = time_double(start_interval) + 6. * 30 * 24 * 60 * 60
+
+ brst_intervals = {'start_times': unix_starts,
+ 'end_times': unix_ends}
+
+ with open(os.path.join(CONFIG['local_data_dir'], 'mms_brst_intervals.pickle'), "wb") as file:
+ pickle.dump(brst_intervals, file)
+
+ logging.info(f'Burst intervals updated! Last interval in the file: {time_string(unix_starts[-1])}-{time_string(unix_ends[-1])}')
+
+ return brst_intervals
+
+
+def load_csv_file(filename):
+ taistarttime = []
+ taiendtime = []
+ status = []
+ with open(filename, 'r') as file:
+ reader = csv.reader(file)
+ next(reader) # skip the header row
+ for row in reader:
+ taistarttime.append(int(row[1]))
+ taiendtime.append(int(row[2]))
+ status.append(row[7])
+ return np.array(taistarttime), np.array(taiendtime), np.array(status)
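
A sketch of how the cached intervals are produced and consumed (the pickle lands in CONFIG['local_data_dir']):

    from pyspedas.mms.mms_update_brst_intervals import mms_update_brst_intervals

    intervals = mms_update_brst_intervals()
    if intervals is not None:
        # unix times for every COMPLETE+FINISHED burst segment
        starts = intervals['start_times']
        ends = intervals['end_times']
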
diff --git a/pyspedas/mms/particles/mms_convert_flux_units.py b/pyspedas/mms/particles/mms_convert_flux_units.py
index 2fc8af3a..e647c86a 100644
--- a/pyspedas/mms/particles/mms_convert_flux_units.py
+++ b/pyspedas/mms/particles/mms_convert_flux_units.py
@@ -4,6 +4,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_convert_flux_units(data_in, units=None):
"""
Perform unit conversions for MMS particle data structures
@@ -28,7 +29,6 @@ def mms_convert_flux_units(data_in, units=None):
3D particle data structure with the data in the units specified by
the 'units' parameter
"""
-
if units is None:
logging.error('Error, no units specified')
return None
@@ -50,21 +50,21 @@ def mms_convert_flux_units(data_in, units=None):
# get mass of species
if species_lc == 'i':
- A = 1.0 # H+
+ A = 1.0 # H+
elif species_lc == 'proton':
- A = 1.0 # H+
+ A = 1.0 # H+
elif species_lc == 'hplus':
- A = 1.0 # H+
+ A = 1.0 # H+
elif species_lc == 'heplus':
- A = 4.0 # He+
+ A = 4.0 # He+
elif species_lc == 'heplusplus':
- A = 4.0 # He++
+ A = 4.0 # He++
elif species_lc == 'oplus':
- A = 16.0 # O+
+ A = 16.0 # O+
elif species_lc == 'oplusplus':
- A = 16.0 # O++
+ A = 16.0 # O++
elif species_lc == 'e':
- A = 1.0/1836.0 # e-
+ A = 1.0/1836.0 # e-
# scaling factor between df and flux units
flux_to_df = A**2.0 * 0.5447 * 1e6
@@ -77,8 +77,8 @@ def mms_convert_flux_units(data_in, units=None):
# rather than multiplying by all applicable in/out factors
# these exponents should always be integers!
# [energy, flux_to_df, cm_to_km]
- exp_in = [0,0,0]
- exp_out = [0,0,0]
+ exp_in = [0, 0, 0]
+ exp_out = [0, 0, 0]
# get input/output scaling exponents
if units_in == 'flux':
@@ -100,4 +100,4 @@ def mms_convert_flux_units(data_in, units=None):
data_out['units_name'] = units_out
data_out['data'] = data_in['data']*data_in['energy']**exp[0]*(flux_to_df**exp[1]*cm_to_km**exp[2])
- return data_out
\ No newline at end of file
+ return data_out
diff --git a/pyspedas/mms/particles/mms_part_des_photoelectrons.py b/pyspedas/mms/particles/mms_part_des_photoelectrons.py
index f99c65dd..fced2adb 100644
--- a/pyspedas/mms/particles/mms_part_des_photoelectrons.py
+++ b/pyspedas/mms/particles/mms_part_des_photoelectrons.py
@@ -1,4 +1,3 @@
-
import os
import logging
from pyspedas.mms.mms_config import CONFIG
@@ -8,6 +7,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_part_des_photoelectrons(dist_var):
"""
Loads and returns the FPI/DES photoelectron model based on stepper ID
@@ -75,4 +75,4 @@ def mms_part_des_photoelectrons(dist_var):
# shouldn't get here
logging.error('Error: something went wrong with the photoelectron model')
- return
\ No newline at end of file
+ return
diff --git a/pyspedas/mms/particles/mms_part_getspec.py b/pyspedas/mms/particles/mms_part_getspec.py
index 4efe1b4d..58a2c738 100644
--- a/pyspedas/mms/particles/mms_part_getspec.py
+++ b/pyspedas/mms/particles/mms_part_getspec.py
@@ -1,19 +1,35 @@
-
import logging
from time import time
-
from pyspedas import time_double, mms
from pyspedas.mms.particles.mms_part_products import mms_part_products
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
-def mms_part_getspec(instrument='fpi', probe='1', species='e', data_rate='fast',
- trange=None, output=['energy', 'theta', 'phi', 'pa', 'gyro'], units='eflux', energy=None,
- phi=None, theta=None, pitch=None, gyro=None, mag_data_rate=None, scpot_data_rate=None, fac_type='mphigeo',
- center_measurement=False, spdf=False, correct_photoelectrons=False,
- internal_photoelectron_corrections=False, disable_photoelectron_corrections=False, zero_negative_values=False,
- regrid=[32, 16], no_regrid=False):
+
+def mms_part_getspec(instrument='fpi',
+ probe='1',
+ species='e',
+ data_rate='fast',
+ trange=None,
+ output=['energy', 'theta', 'phi', 'pa', 'gyro'],
+ units='eflux',
+ energy=None,
+ phi=None,
+ theta=None,
+ pitch=None,
+ gyro=None,
+ mag_data_rate=None,
+ scpot_data_rate=None,
+ fac_type='mphigeo',
+ center_measurement=False,
+ spdf=False,
+ correct_photoelectrons=False,
+ internal_photoelectron_corrections=False,
+ disable_photoelectron_corrections=False,
+ zero_negative_values=False,
+ regrid=[32, 16],
+ no_regrid=False):
"""
Generate spectra and moments from 3D MMS particle data
@@ -105,9 +121,8 @@ def mms_part_getspec(instrument='fpi', probe='1', species='e', data_rate='fast',
start_time = time()
if trange is None:
- # test data for development
- trange = ['2015-10-16/13:06', '2015-10-16/13:07']
- # data_rate = 'brst'
+ logging.error('Time range not specified; please specify time range using the trange keyword.')
+ return
if mag_data_rate is None:
if data_rate == 'brst':
@@ -198,4 +213,4 @@ def mms_part_getspec(instrument='fpi', probe='1', species='e', data_rate='fast',
logging.info('Finished; time to run: ' + str(round(time()-start_time, 1)) + ' seconds.')
- return out_vars
\ No newline at end of file
+ return out_vars
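
Since the development default was removed, trange is now required; a minimal call (the interval reuses the old development default):

    from pyspedas.mms.particles.mms_part_getspec import mms_part_getspec

    out_vars = mms_part_getspec(trange=['2015-10-16/13:06', '2015-10-16/13:07'],
                                instrument='fpi', probe='1', species='e',
                                data_rate='brst', output=['energy', 'pa'])
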
diff --git a/pyspedas/mms/particles/mms_part_products.py b/pyspedas/mms/particles/mms_part_products.py
index 6f2f4753..35755500 100644
--- a/pyspedas/mms/particles/mms_part_products.py
+++ b/pyspedas/mms/particles/mms_part_products.py
@@ -28,11 +28,29 @@
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
-def mms_part_products(in_tvarname, units='eflux', species='e', data_rate='fast', instrument='fpi', probe='1',
- output=['energy', 'theta', 'phi'], energy=None, phi=None, theta=None, pitch=None, gyro=None, mag_name=None,
- pos_name=None, fac_type='mphigeo', sc_pot_name=None, correct_photoelectrons=False, zero_negative_values=False,
- internal_photoelectron_corrections=False, disable_photoelectron_corrections=False, no_regrid=False,
- regrid=[32, 16], vel_name=None):
+def mms_part_products(in_tvarname,
+ units='eflux',
+ species='e',
+ data_rate='fast',
+ instrument='fpi',
+ probe='1',
+ output=['energy', 'theta', 'phi'],
+ energy=None,
+ phi=None,
+ theta=None,
+ pitch=None,
+ gyro=None,
+ mag_name=None,
+ pos_name=None,
+ fac_type='mphigeo',
+ sc_pot_name=None,
+ correct_photoelectrons=False,
+ zero_negative_values=False,
+ internal_photoelectron_corrections=False,
+ disable_photoelectron_corrections=False,
+ no_regrid=False,
+ regrid=[32, 16],
+ vel_name=None):
"""
Generate spectra and moments from 3D MMS particle data; note: this routine isn't
meant to be called directly - see the wrapper mms_part_getspec instead.
@@ -204,10 +222,10 @@ def mms_part_products(in_tvarname, units='eflux', species='e', data_rate='fast',
corrections, including photoelectron removal and spacecraft potential.
The official moments released by the instrument teams include these and
are the scientific products that should be used for analysis."""
- print('==================================================================================')
- print('WARNING:')
- print(msg)
- print('==================================================================================')
+ logging.warning('==================================================================================')
+ logging.warning('WARNING:')
+ logging.warning(msg)
+ logging.warning('==================================================================================')
# grab the DES photoelectron model if needed
if (instrument != 'fpi' or species != 'e') and (correct_photoelectrons or internal_photoelectron_corrections):
@@ -369,4 +387,4 @@ def mms_part_products(in_tvarname, units='eflux', species='e', data_rate='fast',
spd_pgs_make_tplot(in_tvarname+'_gyro', x=data_times, y=out_gyro_y, z=out_gyro, units=units, ytitle=dist_in['data_name'], ysubtitle='gyro (deg)')
out_vars.append(in_tvarname+'_gyro')
- return out_vars
\ No newline at end of file
+ return out_vars
diff --git a/pyspedas/mms/particles/mms_part_slice2d.py b/pyspedas/mms/particles/mms_part_slice2d.py
index 28e3b843..48635b37 100644
--- a/pyspedas/mms/particles/mms_part_slice2d.py
+++ b/pyspedas/mms/particles/mms_part_slice2d.py
@@ -1,3 +1,4 @@
+import logging
import pyspedas
from pyspedas import time_double
from pyspedas.mms.fpi.mms_get_fpi_dist import mms_get_fpi_dist
@@ -27,6 +28,8 @@ def mms_part_slice2d(trange=None,
species=None,
rotation='xy',
custom_rotation=None,
+ slice_x=None,
+ slice_norm=None,
subtract_bulk=False,
xrange=None,
yrange=None,
@@ -34,6 +37,7 @@ def mms_part_slice2d(trange=None,
resolution=None,
interpolation='geometric',
contours=False,
+ title=None,
smooth=None,
save_jpeg=None,
save_png=None,
@@ -43,6 +47,7 @@ def mms_part_slice2d(trange=None,
plotsize=10,
dpi=None,
return_slice=False,
+ cmap=None,
display=True):
"""
This routine creates 2D slices of 3D distribution function data from the FPI and HPCA instruments.
@@ -119,6 +124,17 @@ def mms_part_slice2d(trange=None,
If the time window covers multiple matrices they will be averaged.
This is applied before other transformations
+ slice_x: str or np.ndarray
+ Specifies the slice plane's x-axis within the coordinates
+ specified by custom_rotation and rotation. If not specified, the given
+ coordinate's x-axis will be used. If slice_x is not perpendicular to the
+ normal, its projection onto the slice plane will be used.
+
+ slice_norm: str or np.ndarray
+ Specifies the slice plane's normal within the coordinates
+ specified by custom_rotation and rotation; if not specified, the given
+ coordinate's z-axis will be used (slice along the x-y plane in those coordinates).
+
energy: bool
Flag to plot data against energy (in eV) instead of velocity.
@@ -184,7 +200,7 @@ def mms_part_slice2d(trange=None,
if trange is None:
if time is None:
- print('Please specify a time or time range over which to compute the slice.')
+ logging.error('Please specify a time or time range over which to compute the slice.')
return
trange_data = [time_double(time)-60, time_double(time)+60]
else:
@@ -235,7 +251,7 @@ def mms_part_slice2d(trange=None,
dists = mms_get_hpca_dist('mms' + probe + '_hpca_' + species + '_phase_space_density', species=species,
probe=probe, data_rate=data_rate)
else:
- print('Unknown instrument: ' + instrument + '; valid options: fpi, hpca')
+ logging.error('Unknown instrument: ' + instrument + '; valid options: fpi, hpca')
return
bfield = None
@@ -257,11 +273,11 @@ def mms_part_slice2d(trange=None,
mag_data=bfield, vel_data=vbulk, rotation=rotation, resolution=resolution, erange=erange,
energy=energy, log=log, custom_rotation=custom_rotation, subtract_bulk=subtract_bulk,
interpolation=interpolation, thetarange=thetarange, zdirrange=zdirrange, smooth=smooth,
- average_angle=average_angle, sum_angle=sum_angle)
+ average_angle=average_angle, sum_angle=sum_angle, slice_x=slice_x, slice_z=slice_norm)
if return_slice:
return the_slice
plot(the_slice, xrange=xrange, yrange=yrange, zrange=zrange, save_png=save_png, save_svg=save_svg,
save_pdf=save_pdf, save_eps=save_eps, save_jpeg=save_jpeg, display=display, dpi=dpi, plotsize=plotsize,
- contours=contours)
+ contours=contours, colormap=cmap, title=title)
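
A sketch of the new slice-plane and styling keywords (the time and normal vector are placeholders):

    from pyspedas.mms.particles.mms_part_slice2d import mms_part_slice2d

    # slice the ion distribution in the plane normal to [0, 0, 1]
    mms_part_slice2d(time='2015-10-16/13:06:30', probe='1', instrument='fpi',
                     species='i', data_rate='brst', rotation='xy',
                     slice_norm=[0, 0, 1], cmap='viridis', title='MMS1 DIS slice')
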
diff --git a/pyspedas/mms/particles/mms_pgs_clean_data.py b/pyspedas/mms/particles/mms_pgs_clean_data.py
index 0d7fdbbc..1036f635 100644
--- a/pyspedas/mms/particles/mms_pgs_clean_data.py
+++ b/pyspedas/mms/particles/mms_pgs_clean_data.py
@@ -1,7 +1,7 @@
-
import numpy as np
from scipy.ndimage.interpolation import shift
+
def mms_pgs_clean_data(data_in):
"""
Sanitize MMS FPI/HPCA data structures for use with
@@ -26,4 +26,4 @@ def mms_pgs_clean_data(data_in):
# just have to make a guess at the edges(top edge)
output['denergy'][-1, :] = de[-1, :]
- return output
\ No newline at end of file
+ return output
diff --git a/pyspedas/mms/particles/mms_pgs_clean_support.py b/pyspedas/mms/particles/mms_pgs_clean_support.py
index 0630523c..8c5b6b3d 100644
--- a/pyspedas/mms/particles/mms_pgs_clean_support.py
+++ b/pyspedas/mms/particles/mms_pgs_clean_support.py
@@ -1,50 +1,49 @@
-
from pyspedas import tinterpol
from pytplot import get_data
+
def mms_pgs_clean_support(times, mag_name=None, vel_name=None, sc_pot_name=None):
- """
- Transform and/or interpolate support data to match the particle data
+ """
+ Transform and/or interpolate support data to match the particle data
- Parameters
+ Parameters
----------
- mag_name: str
- Tplot variable containing magnetic field data
+ mag_name: str
+ Tplot variable containing magnetic field data
- vel_name: str
- Tplot variable containing bulk velocity data
+ vel_name: str
+ Tplot variable containing bulk velocity data
- sc_pot_name: str
- Tplot variable containing spacecraft potential data
+ sc_pot_name: str
+ Tplot variable containing spacecraft potential data
- Returns
+ Returns
----------
- Tuple containing interpolated (magnetic field, velocity, spacecraft potential)
- """
-
- out_mag = None
- out_vel = None
- out_scpot = None
-
- if mag_name is not None:
- mag_temp = mag_name + '_pgs_temp'
- tinterpol(mag_name, times, newname=mag_temp)
- interpolated_bfield = get_data(mag_temp)
- if interpolated_bfield is not None:
- out_mag = interpolated_bfield.y
-
- if vel_name is not None:
- vel_temp = vel_name + '_pgs_temp'
- tinterpol(vel_name, times, newname=vel_temp)
- interpolated_vel = get_data(vel_temp)
- if interpolated_vel is not None:
- out_vel = interpolated_vel.y
-
- if sc_pot_name is not None:
- scpot_temp = sc_pot_name + '_pgs_temp'
- tinterpol(sc_pot_name, times, newname=scpot_temp)
- interpolated_scpot = get_data(scpot_temp)
- if interpolated_scpot is not None:
- out_scpot = interpolated_scpot.y
-
- return (out_mag, out_vel, out_scpot)
\ No newline at end of file
+ Tuple containing interpolated (magnetic field, velocity, spacecraft potential)
+ """
+ out_mag = None
+ out_vel = None
+ out_scpot = None
+
+ if mag_name is not None:
+ mag_temp = mag_name + '_pgs_temp'
+ tinterpol(mag_name, times, newname=mag_temp)
+ interpolated_bfield = get_data(mag_temp)
+ if interpolated_bfield is not None:
+ out_mag = interpolated_bfield.y
+
+ if vel_name is not None:
+ vel_temp = vel_name + '_pgs_temp'
+ tinterpol(vel_name, times, newname=vel_temp)
+ interpolated_vel = get_data(vel_temp)
+ if interpolated_vel is not None:
+ out_vel = interpolated_vel.y
+
+ if sc_pot_name is not None:
+ scpot_temp = sc_pot_name + '_pgs_temp'
+ tinterpol(sc_pot_name, times, newname=scpot_temp)
+ interpolated_scpot = get_data(scpot_temp)
+ if interpolated_scpot is not None:
+ out_scpot = interpolated_scpot.y
+
+ return out_mag, out_vel, out_scpot
diff --git a/pyspedas/mms/particles/mms_pgs_make_e_spec.py b/pyspedas/mms/particles/mms_pgs_make_e_spec.py
index 4ea90509..c562a51a 100644
--- a/pyspedas/mms/particles/mms_pgs_make_e_spec.py
+++ b/pyspedas/mms/particles/mms_pgs_make_e_spec.py
@@ -1,5 +1,3 @@
-
-
import numpy as np
# use nansum from bottleneck if it's installed, otherwise use the numpy one
@@ -9,8 +7,31 @@
except ImportError:
nansum = np.nansum
+
def mms_pgs_make_e_spec(data_in):
+ """
+ Builds an energy spectrogram from a simplified particle data structure.
+
+ Parameters
+ ----------
+ data_in : dict
+ The input data structure.
+
+ Returns
+ -------
+ outtable : ndarray, shape (ny,)
+ The energy bins.
+ ave : ndarray, shape (ny,)
+ The spectrogram.
+ Notes
+ -----
+ - Each energy bin in the output spectrogram (`ave`) is the weighted average
+ of the corresponding bins in the input data (`data_in`).
+ - The input data is sanitized by zeroing inactive bins to ensure areas with
+ no data are represented as NaN.
+ - The function uses the first energy table for rebinning the data.
+ """
data = data_in.copy()
# zero inactive bins to ensure areas with no data are represented as NaN
@@ -37,7 +58,7 @@ def mms_pgs_make_e_spec(data_in):
else:
ave = outbins/data['bins']
- return (outtable, ave)
+ return outtable, ave
def find_nearest_neighbor(table, item):
diff --git a/pyspedas/mms/particles/mms_pgs_make_fac.py b/pyspedas/mms/particles/mms_pgs_make_fac.py
index 1b89a957..aff539a8 100644
--- a/pyspedas/mms/particles/mms_pgs_make_fac.py
+++ b/pyspedas/mms/particles/mms_pgs_make_fac.py
@@ -1,18 +1,16 @@
-
import logging
import numpy as np
-
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
from pyspedas.cotrans.cotrans import cotrans
-from pyspedas.analysis.tnormalize import tnormalize
-from pyspedas.analysis.tcrossp import tcrossp
+from pytplot import tnormalize
+from pytplot import tcrossp
from pyspedas.analysis.tinterpol import tinterpol
-
from pytplot import get_data, store_data
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def mms_pgs_xgse(mag_temp, pos_temp):
"""
Generates the 'xgse' transformation matrix
@@ -29,7 +27,8 @@ def mms_pgs_xgse(mag_temp, pos_temp):
y_basis = tnormalize(y_basis, return_data=True)
x_basis = tcrossp(y_basis, z_basis, return_data=True)
- return (x_basis, y_basis, z_basis)
+ return x_basis, y_basis, z_basis
+
def mms_pgs_phigeo(mag_temp, pos_temp):
"""
@@ -58,7 +57,8 @@ def mms_pgs_phigeo(mag_temp, pos_temp):
x_basis = tnormalize(x_basis, return_data=True)
y_basis = tcrossp(z_basis, x_basis, return_data=True)
- return (x_basis, y_basis, z_basis)
+ return x_basis, y_basis, z_basis
+
def mms_pgs_mphigeo(mag_temp, pos_temp):
"""
@@ -88,7 +88,7 @@ def mms_pgs_mphigeo(mag_temp, pos_temp):
x_basis = tnormalize(x_basis, return_data=True)
y_basis = tcrossp(z_basis, x_basis, return_data=True)
- return (x_basis, y_basis, z_basis)
+ return x_basis, y_basis, z_basis
def mms_pgs_make_fac(times, mag_tvar_in, pos_tvar_in, fac_type='mphigeo'):
@@ -132,4 +132,4 @@ def mms_pgs_make_fac(times, mag_tvar_in, pos_tvar_in, fac_type='mphigeo'):
fac_output[:, 1, :] = basis[1]
fac_output[:, 2, :] = basis[2]
- return fac_output
\ No newline at end of file
+ return fac_output
diff --git a/pyspedas/mms/particles/mms_pgs_make_phi_spec.py b/pyspedas/mms/particles/mms_pgs_make_phi_spec.py
index 409d5309..b9da1440 100644
--- a/pyspedas/mms/particles/mms_pgs_make_phi_spec.py
+++ b/pyspedas/mms/particles/mms_pgs_make_phi_spec.py
@@ -1,4 +1,3 @@
-
import numpy as np
# use nansum from bottleneck if it's installed, otherwise use the numpy one
@@ -8,8 +7,32 @@
except ImportError:
nansum = np.nansum
-def mms_pgs_make_phi_spec(data_in, resolution=32):
+def mms_pgs_make_phi_spec(data_in, resolution=32):
+ """
+ Builds phi (longitudinal) spectrogram from a sanitized particle data structure.
+
+ Parameters
+ ----------
+ data_in : dict
+ The sanitized particle data structure containing 'phi', 'data', and 'bins' arrays.
+ resolution : int, optional
+ The number of bins to divide the 360 degrees of phi into. Default is 32.
+
+ Returns
+ -------
+ y : array
+ The bin centers for the phi spectrogram.
+ ave : array
+ The phi spectrogram with shape (n_phi,).
+
+ Notes
+ -----
+ Each output bin is the weighted average of the input samples that fall
+ into it (the nansum of the data divided by the nansum of the active-bin
+ flags); bins with no active samples are left at zero rather than
+ triggering a divide-by-zero.
+ """
data = data_in.copy()
n_phi = resolution
@@ -30,8 +53,10 @@ def mms_pgs_make_phi_spec(data_in, resolution=32):
for bin_idx in range(0, len(outbins)-1):
this_bin = np.argwhere((phi_flat >= outbins[bin_idx]) & (phi_flat < outbins[bin_idx+1]))
if len(this_bin) > 0:
- ave[bin_idx] += nansum(data_flat[this_bin])/nansum(bins_flat[this_bin])
+ bins = nansum(bins_flat[this_bin])
+ if bins != 0.0:
+ ave[bin_idx] += nansum(data_flat[this_bin])/bins
y = outbins[0:n_phi]+0.5*(outbins[1::]-outbins[0:n_phi])
- return (y, ave)
+ return y, ave
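
The guard amounts to skipping angle bins with no active samples instead of dividing by zero; a standalone numpy sketch of the pattern (the same fix is applied to the theta spectrogram below):

    import numpy as np

    data_flat = np.array([1.0, 3.0, np.nan])
    bins_flat = np.array([1.0, 1.0, 0.0])  # active-bin flags for one phi bin
    total = np.nansum(bins_flat)
    ave = np.nansum(data_flat) / total if total != 0.0 else 0.0  # 2.0 here
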
diff --git a/pyspedas/mms/particles/mms_pgs_make_theta_spec.py b/pyspedas/mms/particles/mms_pgs_make_theta_spec.py
index a6079bf8..15719769 100644
--- a/pyspedas/mms/particles/mms_pgs_make_theta_spec.py
+++ b/pyspedas/mms/particles/mms_pgs_make_theta_spec.py
@@ -1,5 +1,3 @@
-
-
import numpy as np
# use nansum from bottleneck if it's installed, otherwise use the numpy one
@@ -9,7 +7,27 @@
except ImportError:
nansum = np.nansum
+
def mms_pgs_make_theta_spec(data_in, resolution=16, colatitude=False):
+ """
+ Builds a theta (latitudinal) spectrogram from a simplified particle data structure.
+
+ Parameters
+ ----------
+ data_in : dict
+ A dictionary containing the particle data, including 'data', 'theta', and 'bins' keys.
+ resolution : int, optional
+ The number of bins to use for the spectrogram. Defaults to 16.
+ colatitude : bool, optional
+ Set to True if the input data is in colatitude rather than latitude.
+
+ Returns
+ -------
+ y : numpy.ndarray
+ The y axis of the spectrogram.
+ ave : numpy.ndarray
+ The spectrogram.
+ """
data = data_in.copy()
n_theta = resolution
@@ -34,7 +52,9 @@ def mms_pgs_make_theta_spec(data_in, resolution=16, colatitude=False):
for bin_idx in range(0, len(outbins)-1):
this_bin = np.argwhere((theta_flat >= outbins[bin_idx]) & (theta_flat < outbins[bin_idx+1]))
if len(this_bin) > 0:
- ave[bin_idx] += nansum(data_flat[this_bin])/nansum(bins_flat[this_bin])
+ bins = nansum(bins_flat[this_bin])
+ if bins != 0.0:
+ ave[bin_idx] += nansum(data_flat[this_bin])/bins
if not colatitude:
data['theta'] = 90.0-data['theta']
@@ -42,4 +62,4 @@ def mms_pgs_make_theta_spec(data_in, resolution=16, colatitude=False):
y = outbins[0:n_theta]+0.5*(outbins[1::]-outbins[0:n_theta])
- return (y, ave)
\ No newline at end of file
+ return y, ave
diff --git a/pyspedas/mms/particles/mms_pgs_split_hpca.py b/pyspedas/mms/particles/mms_pgs_split_hpca.py
index 99134f74..d7feab41 100644
--- a/pyspedas/mms/particles/mms_pgs_split_hpca.py
+++ b/pyspedas/mms/particles/mms_pgs_split_hpca.py
@@ -1,6 +1,6 @@
-
import numpy as np
+
def mms_pgs_split_hpca(data_in):
"""
Split hpca elevation bins so that dphi == dtheta.
@@ -16,4 +16,4 @@ def mms_pgs_split_hpca(data_in):
clean_data['theta'] = np.concatenate((clean_data['theta']+0.25*clean_data['dtheta'], clean_data['theta']-0.25*clean_data['dtheta']), axis=1)
clean_data['dtheta'] = np.concatenate((clean_data['dtheta']/2.0, clean_data['dtheta']/2.0), axis=1)
- return clean_data
\ No newline at end of file
+ return clean_data
diff --git a/pyspedas/mms/particles/moka_mms_clean_data.py b/pyspedas/mms/particles/moka_mms_clean_data.py
new file mode 100644
index 00000000..7e0bc7a5
--- /dev/null
+++ b/pyspedas/mms/particles/moka_mms_clean_data.py
@@ -0,0 +1,74 @@
+import logging
+import numpy as np
+from scipy.ndimage.interpolation import shift
+from pyspedas.mms.particles.mms_convert_flux_units import mms_convert_flux_units
+
+
+def moka_mms_clean_data(data_in, units=None, disterr=None):
+ """
+ This is a translation of Mitsuo Oka's IDL routine: moka_mms_clean_data
+ """
+ if units is None:
+ logging.error('Units must be specified.')
+ return
+
+ data = mms_convert_flux_units(data_in, units=units)
+ data_psd = mms_convert_flux_units(data_in, units='df_km')
+
+ output = {'charge': data_in['charge'], 'mass': data_in['mass'],
+ 'data': np.reshape(data_in['data'], [data_in['data'].shape[0]*data_in['data'].shape[1]*data_in['data'].shape[2]], order='F'),
+ 'bins': np.reshape(data_in['bins'], [data_in['data'].shape[0]*data_in['data'].shape[1]*data_in['data'].shape[2]], order='F'),
+ 'theta': np.reshape(data_in['theta'], [data_in['data'].shape[0]*data_in['data'].shape[1]*data_in['data'].shape[2]], order='F'),
+ 'energy': np.reshape(data_in['energy'], [data_in['data'].shape[0], data_in['data'].shape[1]*data_in['data'].shape[2]], order='F'),
+ 'phi': np.reshape(data_in['phi'], [data_in['data'].shape[0]*data_in['data'].shape[1]*data_in['data'].shape[2]], order='F'),
+ 'dtheta': np.reshape(data_in['dtheta'], [data_in['data'].shape[0]*data_in['data'].shape[1]*data_in['data'].shape[2]], order='F'),
+ 'dphi': np.reshape(data_in['dphi'], [data_in['data'].shape[0]*data_in['data'].shape[1]*data_in['data'].shape[2]], order='F'),
+ 'denergy': np.reshape(data_in['denergy'], [data_in['data'].shape[0]*data_in['data'].shape[1]*data_in['data'].shape[2]], order='F')}
+
+ de = output['energy'] - shift(output['energy'], [1, 0])
+ output['denergy'] = shift((de+shift(de, [1, 0]))/2.0, -1)
+ # just have to make a guess at the edges(bottom edge)
+ output['denergy'][0, :] = de[1, :]
+ # just have to make a guess at the edges(top edge)
+ output['denergy'][-1, :] = de[-1, :]
+
+ dims = data['data'].shape
+ imax = dims[0]*dims[1]*dims[2]
+
+ output['energy'] = np.reshape(output['energy'], [output['energy'].shape[0] * output['energy'].shape[1]],
+ order='F')
+ output['denergy'] = np.reshape(output['denergy'], [output['denergy'].shape[0] * output['denergy'].shape[1]],
+ order='F')
+
+ # Error
+ psd = output['data']
+ if disterr is None:
+ err = np.zeros(imax)
+ cnt = np.zeros(imax)
+ else:
+ data_err = mms_convert_flux_units(disterr, units='df_km')
+ err = data_err['data']
+ cnt = (psd/err)**2 # actual counts recovered
+
+ # NaN
+ dat = output['data']
+ bins = output['bins']
+ idx = np.argwhere(bins == 0)
+ if len(idx) > 0:
+ dat[idx] = 0.0
+ psd[idx] = 0.0
+ err[idx] = 0.0
+ cnt[idx] = 0.0
+
+ dat = np.nan_to_num(dat, nan=0.0)
+ psd = np.nan_to_num(psd, nan=0.0)
+ err = np.nan_to_num(err, nan=0.0)
+ cnt = np.nan_to_num(cnt, nan=0.0)
+
+ output['data_dat'] = dat
+ output['data_psd'] = psd
+ output['data_err'] = err
+ output['data_cnt'] = cnt
+ output['pa'] = np.zeros(imax)
+
+ return output
diff --git a/pyspedas/mms/print_vars.py b/pyspedas/mms/print_vars.py
index 9558b2ec..5371215c 100644
--- a/pyspedas/mms/print_vars.py
+++ b/pyspedas/mms/print_vars.py
@@ -1,6 +1,7 @@
-
+import logging
from pyspedas.mms.mms_config import CONFIG
+
# the following decorator prints the loaded tplot variables after each load routine call
def print_vars(func):
def wrapper(*args, **kwargs):
@@ -8,11 +9,11 @@ def wrapper(*args, **kwargs):
if variables is None:
return None
if kwargs.get('available') or CONFIG['download_only']:
- print('Available files:')
+ logging.info('Available files:')
else:
- print('Loaded variables:')
+ logging.info('Loaded variables:')
for var in variables:
- print(var)
+ logging.info(var)
return variables
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
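
With the decorator routed through logging, the variable listing respects the configured log level; a minimal sketch (the load call is one of the routines the decorator wraps):

    import logging
    import pyspedas

    logging.basicConfig(level=logging.INFO)
    vars_loaded = pyspedas.mms.fgm(trange=['2015-10-16', '2015-10-17'])
    # emits 'Loaded variables:' followed by one INFO line per tplot variable
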
diff --git a/pyspedas/mms/scm/mms_scm_set_metadata.py b/pyspedas/mms/scm/mms_scm_set_metadata.py
index 1cfa3391..f9e3e8e9 100644
--- a/pyspedas/mms/scm/mms_scm_set_metadata.py
+++ b/pyspedas/mms/scm/mms_scm_set_metadata.py
@@ -1,5 +1,6 @@
from pytplot import options
+
def mms_scm_set_metadata(tnames, probe, datatype, coord, suffix=''):
"""
This function updates the metadata for the SCM data products
@@ -28,18 +29,14 @@ def mms_scm_set_metadata(tnames, probe, datatype, coord, suffix=''):
for tvar in tnames:
if tvar == 'mms'+probe+'_scm_'+datatype+'_'+coord+suffix:
- options(tvar, 'color', ['b', 'g', 'r'])
options(tvar, 'ytitle', 'MMS'+probe+' '+datatype+' ('+coord+')')
options(tvar, 'legend_names', ['1', '2', '3'])
elif tvar == 'mms'+probe+'_scm_acb_'+coord+'_scsrvy_srvy_l2'+suffix:
- options(tvar, 'color', ['b', 'g', 'r'])
options(tvar, 'legend_names', ['Bx', 'By', 'Bz'])
options(tvar, 'ytitle', 'MMS'+probe+' SCM '+datatype)
elif tvar == 'mms'+probe+'_scm_acb_'+coord+'_scb_brst_l2'+suffix:
- options(tvar, 'color', ['b', 'g', 'r'])
options(tvar, 'legend_names', ['Bx', 'By', 'Bz'])
options(tvar, 'ytitle', 'MMS'+probe+' SCM '+datatype)
elif tvar == 'mms'+probe+'_scm_acb_'+coord+'_schb_brst_l2'+suffix:
- options(tvar, 'color', ['b', 'g', 'r'])
options(tvar, 'legend_names', ['Bx', 'By', 'Bz'])
options(tvar, 'ytitle', 'MMS'+probe+' SCM '+datatype)
diff --git a/pyspedas/mms/scm/scm.py b/pyspedas/mms/scm/scm.py
index fde95d75..e1e09a12 100644
--- a/pyspedas/mms/scm/scm.py
+++ b/pyspedas/mms/scm/scm.py
@@ -1,21 +1,21 @@
-
from pyspedas.mms.mms_load_data import mms_load_data
from pyspedas.mms.scm.mms_scm_set_metadata import mms_scm_set_metadata
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
+
@print_vars
def mms_load_scm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy',
level='l2', datatype='', varformat=None, varnames=[], suffix='', get_support_data=False,
time_clip=True, no_update=False, available=False, notplot=False, latest_version=False,
major_version=False, min_version=None, cdf_version=None, spdf=False, always_prompt=False):
"""
- This function loads SCM data into tplot variables
+ Load data from the Search Coil Magnetometer (SCM)
Parameters
----------
trange : list of str
- time range of interest [starttime, endtime] with the format
+ time range of interest [start time, end time] with the format
'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
@@ -56,11 +56,11 @@ def mms_load_scm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
- access to multi-dimensional data products)
+ access to multidimensional data products)
available: bool
If True, simply return the available data files (without downloading)
- for the requested paramters
+ for the requested parameters
no_update: bool
Set this flag to preserve the original data. if not set and newer
@@ -80,12 +80,13 @@ def mms_load_scm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
- useful if you accidently save an incorrect password, or if your SDC password has changed
+ useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
- Returns:
+ Returns
+ ---------
List of tplot variables created.
"""
diff --git a/pyspedas/mms/spd_mms_load_bss.py b/pyspedas/mms/spd_mms_load_bss.py
index 19d60fe7..f0585c76 100644
--- a/pyspedas/mms/spd_mms_load_bss.py
+++ b/pyspedas/mms/spd_mms_load_bss.py
@@ -1,9 +1,10 @@
-
+import logging
from pyspedas import time_double
from pyspedas.mms.mms_load_fast_segments import mms_load_fast_segments
from pyspedas.mms.mms_load_sroi_segments import mms_load_sroi_segments
from pyspedas.mms.mms_load_brst_segments import mms_load_brst_segments
+
def spd_mms_load_bss(trange=['2015-10-16', '2015-10-17'], datatype=['fast', 'burst'],
include_labels=False, probe='1', nodownload=False):
"""
@@ -46,8 +47,8 @@ def spd_mms_load_bss(trange=['2015-10-16', '2015-10-17'], datatype=['fast', 'bur
elif dtype == 'burst':
out = mms_load_brst_segments(trange=trange)
else:
- print('Unsupported datatype: ' + dtype + '; valid options: "fast" and "burst"')
+ logging.error('Unsupported datatype: ' + dtype + '; valid options: "fast" and "burst"')
continue
if out is None:
- print('Problem loading segments for ' + dtype)
+ logging.error('Problem loading segments for ' + dtype)
diff --git a/pyspedas/mms/tests/cotrans.py b/pyspedas/mms/tests/cotrans.py
index 8787ba79..90f8b44c 100644
--- a/pyspedas/mms/tests/cotrans.py
+++ b/pyspedas/mms/tests/cotrans.py
@@ -1,7 +1,9 @@
import unittest
import pyspedas
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists, tplot_rename, set_coords
from pyspedas.mms.cotrans.mms_qcotrans import mms_qcotrans
+from pyspedas.mms.cotrans.mms_cotrans_lmn import mms_cotrans_lmn
+
class CotransTestCases(unittest.TestCase):
def test_qcotrans_sm_to_gse(self):
@@ -10,6 +12,57 @@ def test_qcotrans_sm_to_gse(self):
mms_qcotrans('mms1_mec_r_sm', 'mms1_mec_r_sm_2gse', out_coord='gse')
self.assertTrue(data_exists('mms1_mec_v_sm_2gse'))
self.assertTrue(data_exists('mms1_mec_r_sm_2gse'))
+ mms_qcotrans(['mms1_mec_r_sm', 'mms1_mec_v_sm'], ['mms1_mec_r_sm_2gse', 'mms1_mec_v_sm_2gse'], out_coord=['gse', 'gse'])
+
+ def test_qcotrans_fgm_sm_to_gse(self):
+ pyspedas.mms.mec()
+ pyspedas.mms.fgm()
+ mms_qcotrans('mms1_fgm_b_gsm_srvy_l2_bvec', 'mms1_fgm_b_gsm_brst_l2_bvec_2gse', probe=1, out_coord='gse')
+ self.assertTrue(data_exists('mms1_fgm_b_gsm_brst_l2_bvec_2gse'))
+
+ def test_qcotrans_errors(self):
+ pyspedas.mms.mec()
+ # in_name not specified
+ mms_qcotrans(out_name='mms1_mec_v_sm_2gse', out_coord='gse')
+ # out_name not specified
+ mms_qcotrans(in_name='mms1_mec_v_sm_2gse', out_coord='gse')
+ # in_coord not specified, and not set in metadata
+ set_coords('mms1_mec_v_sm', '')
+ mms_qcotrans(in_name='mms1_mec_v_sm', out_name='mms1_mec_v_sm_2gse', out_coord='gse')
+ # invalid in_coord
+ set_coords('mms1_mec_v_sm', '')
+ mms_qcotrans(in_name='mms1_mec_v_sm', out_name='mms1_mec_v_sm_2gse2', out_coord='gse')
+ # invalid out_coord
+ set_coords('mms1_mec_v_sm', 'sm')
+ mms_qcotrans(in_name='mms1_mec_v_sm', out_name='mms1_mec_v_sm_2gse2', out_coord='gse2')
+ # trouble extracting probe from var name
+ tplot_rename('mms1_mec_v_sm', 'mmsx_mec_v_sm')
+ mms_qcotrans(in_name='mmsx_mec_v_sm', out_name='mms1_mec_v_sm_2gse2', out_coord='gse2')
+ tplot_rename('mms1_mec_v_sm', 'smvar')
+ mms_qcotrans(in_name='smvar', out_name='mms1_mec_v_sm_2gse', out_coord='gse')
+ # should warn when you're transforming to ssl/bcs coordinates
+ mms_qcotrans(out_name='mms1_mec_v_sm_2ssl', out_coord='ssl')
+ mms_qcotrans(out_name='mms1_mec_v_sm_2bcs', out_coord='bcs')
+ # unsupported coordinate system
+ mms_qcotrans(in_name='mms1_mec_v_sm', out_name='mms1_mec_v_sm_2gse', in_coord='unsupported', out_coord='gse')
+
+
+ def test_lmn(self):
+ pyspedas.mms.fgm(trange=['2015-10-16/13:00', '2015-10-16/13:10'], data_rate='brst')
+ mms_cotrans_lmn('mms1_fgm_b_gsm_brst_l2_bvec', 'mms1_fgm_b_gsm_brst_l2_bvec_2lmn')
+ self.assertTrue(data_exists('mms1_fgm_b_gsm_brst_l2_bvec_2lmn'))
+
+ def test_lmn_errors(self):
+ pyspedas.mms.fgm(trange=['2015-10-16/13:00', '2015-10-16/13:10'], data_rate='brst')
+ # invalid variable name
+ mms_cotrans_lmn('mms1_fgm_b_gsm_brst_l2_bvec2', 'mms1_fgm_b_gsm_brst_l2_bvec_2lmn')
+ # invalid coordinate system
+ set_coords('mms1_fgm_b_gsm_brst_l2_bvec', 'gse2')
+ mms_cotrans_lmn('mms1_fgm_b_gsm_brst_l2_bvec', 'mms1_fgm_b_gsm_brst_l2_bvec_2lmn')
+ # problem extracting probe from variable name
+ tplot_rename('mms1_fgm_b_gsm_brst_l2_bvec', 'mmsx_fgm_b_gsm_brst_l2_bvec')
+ mms_cotrans_lmn('mmsx_fgm_b_gsm_brst_l2_bvec', 'mms1_fgm_b_gsm_brst_l2_bvec_2lmn')
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
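
To make the coordinate tests above easier to follow, this is the basic mms_qcotrans call pattern they exercise (variable names mirror the test fixtures):

    import pyspedas
    from pyspedas.mms.cotrans.mms_qcotrans import mms_qcotrans
    pyspedas.mms.mec()  # loads mms1_mec_r_sm, among other variables
    # quaternion-based transform from SM to GSE coordinates
    mms_qcotrans('mms1_mec_r_sm', 'mms1_mec_r_sm_2gse', out_coord='gse')
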
diff --git a/pyspedas/mms/tests/curlometer.py b/pyspedas/mms/tests/curlometer.py
index d5a544a1..e50447c6 100644
--- a/pyspedas/mms/tests/curlometer.py
+++ b/pyspedas/mms/tests/curlometer.py
@@ -1,7 +1,7 @@
import unittest
import pyspedas
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
from pyspedas.mms.fgm.mms_lingradest import mms_lingradest
class CurlTestCases(unittest.TestCase):
diff --git a/pyspedas/mms/tests/data_rate_segments.py b/pyspedas/mms/tests/data_rate_segments.py
index 09279aee..b7de0491 100644
--- a/pyspedas/mms/tests/data_rate_segments.py
+++ b/pyspedas/mms/tests/data_rate_segments.py
@@ -1,11 +1,11 @@
-
import unittest
-
from pyspedas.mms.spd_mms_load_bss import spd_mms_load_bss
-from pyspedas.mms.mms_load_sroi_segments import mms_load_sroi_segments
+from pyspedas.mms.mms_load_sroi_segments import mms_load_sroi_segments, get_mms_srois
from pyspedas.mms.mms_load_fast_segments import mms_load_fast_segments
from pyspedas.mms.mms_load_brst_segments import mms_load_brst_segments
-from pyspedas.utilities.data_exists import data_exists
+from pyspedas.mms.mms_update_brst_intervals import mms_update_brst_intervals
+from pytplot import data_exists
+
class SegmentTestCases(unittest.TestCase):
def test_sroi(self):
@@ -14,18 +14,46 @@ def test_sroi(self):
self.assertTrue(len(sroi[0]) == 28)
self.assertTrue(sroi[0][0] == 1569849345.0)
self.assertTrue(sroi[1][0] == 1569923029.0)
+ # error, no trange specified
+ sroi = mms_load_sroi_segments()
+ # error, start time not specified
+ none = get_mms_srois(end_time=1569849345.0)
+ # error, end time not specified
+ none = get_mms_srois(start_time=1569849345.0)
+ # error, probe not specified
+ none = get_mms_srois(start_time=1569849345.0, end_time=1569849345.0)
+
def test_brst(self):
brst = mms_load_brst_segments(trange=['2015-10-16', '2015-10-17'])
self.assertTrue(len(brst[0]) == 53)
self.assertTrue(brst[0][0] == 1444975174.0)
self.assertTrue(brst[1][0] == 1444975244.0)
self.assertTrue(data_exists('mms_bss_burst'))
+ # error, no trange specified
+ brst = mms_load_brst_segments()
+ # download from spedas.org
+ brst = mms_load_brst_segments(trange=['2015-10-16', '2015-10-17'], sdc=False, suffix='_sdc')
+ self.assertTrue(len(brst[0]) == 53)
+ self.assertTrue(brst[0][0] == 1444975174.0)
+ self.assertTrue(brst[1][0] == 1444975244.0)
+ self.assertTrue(data_exists('mms_bss_burst_sdc'))
+
+ def test_update_brst_intervals(self):
+ intervals = mms_update_brst_intervals()
+ self.assertTrue('start_times' in intervals)
+ self.assertTrue('end_times' in intervals)
+ self.assertTrue(intervals['start_times'][0] == 1430876725.0)
+ self.assertTrue(intervals['end_times'][0] == 1430879715.0)
+
def test_fast(self):
fast = mms_load_fast_segments(trange=['2015-10-01', '2015-11-01'])
self.assertTrue(data_exists('mms_bss_fast'))
self.assertTrue(len(fast[0]) == 35)
self.assertTrue(fast[0][0] == 1443504404.0)
self.assertTrue(fast[1][0] == 1443554774.0)
+ # error, no trange specified
+ none = mms_load_fast_segments()
+
def test_spd_mms_load_bss(self):
spd_mms_load_bss(trange=['2015-10-01', '2015-11-01'])
self.assertTrue(data_exists('mms_bss_fast'))
@@ -34,5 +62,10 @@ def test_spd_mms_load_bss(self):
self.assertTrue(data_exists('mms_bss_burst'))
self.assertTrue(data_exists('mms1_bss_sroi'))
+ def test_spd_mms_load_bss_err(self):
+ spd_mms_load_bss(trange=['2015-10-01', '2015-11-01'], datatype='brst', include_labels=True)
+        self.assertTrue(not data_exists('mms_bss_fast'))
+
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
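
The burst segment loader's return shape, as a minimal sketch mirroring the assertions in test_brst:

    from pyspedas.mms.mms_load_brst_segments import mms_load_brst_segments
    brst = mms_load_brst_segments(trange=['2015-10-16', '2015-10-17'])
    # brst[0] holds segment start times, brst[1] the end times (unix times);
    # a 'mms_bss_burst' bar variable is also created for plotting
    start_times, end_times = brst[0], brst[1]
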
diff --git a/pyspedas/mms/tests/eis.py b/pyspedas/mms/tests/eis.py
index fa5420b8..6bb64a86 100644
--- a/pyspedas/mms/tests/eis.py
+++ b/pyspedas/mms/tests/eis.py
@@ -1,9 +1,18 @@
import unittest
-import numpy as np
from pyspedas import mms_load_eis, mms_eis_pad
-from pyspedas.utilities.data_exists import data_exists
+from pyspedas.mms.eis.mms_eis_omni import mms_eis_omni
+from pytplot import data_exists
+from pyspedas.mms.eis.mms_eis_spec_combine_sc import mms_eis_spec_combine_sc
+from pytplot import tplot
class EISTestCases(unittest.TestCase):
+ def test_electronenergy(self):
+ mms_load_eis(datatype='electronenergy')
+ self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_electronenergy_electron_flux_omni'))
+ self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_electronenergy_electron_flux_omni_spin'))
+ tplot('mms1_epd_eis_srvy_l2_electronenergy_electron_flux_omni', display=False)
+ tplot('mms1_epd_eis_srvy_l2_electronenergy_electron_flux_omni_spin', display=False)
+
def test_pad_extof_srvy(self):
mms_load_eis(datatype='extof')
mms_eis_pad(datatype='extof')
@@ -13,44 +22,72 @@ def test_pad_extof_srvy(self):
self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_proton_flux_omni'))
self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_oxygen_energy_range'))
self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_proton_energy_range'))
+ tplot('mms1_epd_eis_srvy_l2_extof_proton_flux_omni_spin', display=False)
+ tplot('mms1_epd_eis_srvy_l2_extof_46-10489keV_proton_flux_omni_pad_spin', display=False)
+ tplot('mms1_epd_eis_srvy_l2_extof_46-10489keV_proton_flux_omni_pad', display=False)
def test_pad_extof_srvy_probe(self):
mms_load_eis(probe=4)
- mms_eis_pad(probe=4)
- self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_flux_omni_pad_spin'))
- self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_flux_omni_pad'))
- self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_flux_omni_spin'))
- self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_flux_omni'))
+ mms_eis_omni(probe=4, data_units='cps')
+ mms_eis_omni(probe=4, data_units='counts')
+ mms_eis_pad(probe=4, data_units='cps')
+ self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_cps_omni_pad_spin'))
+ self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_cps_omni_pad'))
+ self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_cps_omni'))
self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_oxygen_energy_range'))
self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_energy_range'))
+ self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_counts_omni'))
+ tplot('mms4_epd_eis_srvy_l2_extof_proton_counts_omni', display=False)
+ tplot('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_cps_omni_pad', display=False)
+ tplot('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_cps_omni_pad_spin', display=False)
def test_pad_extof_brst(self):
- mms_load_eis(probe=4, datatype='extof', data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:07'])
+ mms_load_eis(probe=4, datatype='extof', data_rate='brst', trange=['2022-03-03/07:05:00', '2022-03-03/07:08:00'])
mms_eis_pad(probe=4, datatype='extof', data_rate='brst')
- self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_52-878keV_proton_flux_omni_pad_spin'))
- self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_52-878keV_proton_flux_omni_pad'))
+ self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_52-866keV_proton_flux_omni_pad_spin'))
+ self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_52-866keV_proton_flux_omni_pad'))
self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_proton_flux_omni'))
self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_proton_energy_range'))
self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_oxygen_energy_range'))
+ tplot('mms4_epd_eis_brst_l2_extof_52-866keV_proton_flux_omni_pad', display=False)
+ tplot('mms4_epd_eis_brst_l2_extof_52-866keV_proton_flux_omni_pad_spin', display=False)
def test_load_phxtof_data(self):
data = mms_load_eis(trange=['2015-10-16', '2015-10-16/01:00'], datatype='phxtof')
self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_flux_omni'))
self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_t5_energy_dminus'))
self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_t5_energy_dplus'))
+ tplot('mms1_epd_eis_srvy_l2_phxtof_proton_flux_omni', display=False)
def test_load_phxtof_spdf(self):
- data = mms_load_eis(trange=['2015-10-16/13:06', '2015-10-16/13:07'], datatype='phxtof', data_rate='brst', spdf=True)
- self.assertTrue(data_exists('mms1_epd_eis_brst_l2_phxtof_proton_flux_omni'))
- self.assertTrue(data_exists('mms1_epd_eis_brst_l2_phxtof_proton_t5_energy_dminus'))
- self.assertTrue(data_exists('mms1_epd_eis_brst_l2_phxtof_proton_t5_energy_dplus'))
+ data = mms_load_eis(trange=['2015-10-16', '2015-10-16/01:00'], datatype='phxtof', spdf=True)
+ self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_flux_omni'))
+ self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_t5_energy_dminus'))
+ self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_t5_energy_dplus'))
+ tplot('mms1_epd_eis_srvy_l2_phxtof_proton_flux_omni', display=False)
def test_load_extof_suffix(self):
- data = mms_load_eis(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='extof', suffix='_test')
- self.assertTrue(data_exists('mms1_epd_eis_brst_l2_extof_proton_flux_omni_test'))
- self.assertTrue(data_exists('mms1_epd_eis_brst_l2_extof_proton_t5_energy_dminus_test'))
- self.assertTrue(data_exists('mms1_epd_eis_brst_l2_extof_proton_t5_energy_dminus_test'))
+ data = mms_load_eis(trange=['2015-10-16', '2015-10-17'], datatype='extof', suffix='_test')
+ self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_proton_flux_omni_test_spin'))
+ self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_proton_flux_omni_test'))
+ tplot('mms1_epd_eis_srvy_l2_extof_proton_flux_omni_test_spin', display=False)
+ tplot('mms1_epd_eis_srvy_l2_extof_proton_flux_omni_test', display=False)
+
+ def test_combine_sc(self):
+ trange = ['2022-03-03/07:05:00', '2022-03-03/07:08:00']
+ mms_load_eis(probe=[1, 2, 3, 4], datatype='extof', data_rate='brst', trange=trange)
+ mms_load_eis(probe=[1, 2, 3, 4], datatype='phxtof', data_rate='brst', trange=trange)
+ mms_eis_spec_combine_sc(datatype='extof', data_rate='brst')
+ mms_eis_spec_combine_sc(datatype='phxtof', data_rate='brst')
+ self.assertTrue(data_exists('mmsx_epd_eis_brst_l2_extof_proton_flux_omni'))
+ self.assertTrue(data_exists('mmsx_epd_eis_brst_l2_extof_proton_flux_omni_spin'))
+ self.assertTrue(data_exists('mmsx_epd_eis_brst_l2_phxtof_proton_flux_omni'))
+ self.assertTrue(data_exists('mmsx_epd_eis_brst_l2_phxtof_proton_flux_omni_spin'))
+ tplot('mmsx_epd_eis_brst_l2_extof_proton_flux_omni', display=False)
+ tplot('mmsx_epd_eis_brst_l2_extof_proton_flux_omni_spin', display=False)
+ tplot('mmsx_epd_eis_brst_l2_phxtof_proton_flux_omni', display=False)
+ tplot('mmsx_epd_eis_brst_l2_phxtof_proton_flux_omni_spin', display=False)
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
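
For reference, the multi-spacecraft combination exercised by test_combine_sc reduces to this pattern (trange and parameters taken from the test):

    from pyspedas import mms_load_eis
    from pyspedas.mms.eis.mms_eis_spec_combine_sc import mms_eis_spec_combine_sc
    trange = ['2022-03-03/07:05:00', '2022-03-03/07:08:00']
    mms_load_eis(probe=[1, 2, 3, 4], datatype='extof', data_rate='brst', trange=trange)
    # combines the per-probe omni spectra into mmsx_... variables
    mms_eis_spec_combine_sc(datatype='extof', data_rate='brst')
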
diff --git a/pyspedas/mms/tests/events.py b/pyspedas/mms/tests/events.py
new file mode 100644
index 00000000..e5c2fb0d
--- /dev/null
+++ b/pyspedas/mms/tests/events.py
@@ -0,0 +1,19 @@
+from pyspedas.mms.mms_events import mms_brst_events
+
+import unittest
+
+
+class TestMMSBrstEvents(unittest.TestCase):
+ def test_trange(self):
+ # Test with trange option
+ trange = ['2015-10-16', '2015-10-17']
+ mms_brst_events(trange=trange)
+
+ def test_reload(self):
+ # Test with reload option
+ reload = True
+ mms_brst_events(reload=reload)
+
+
+if __name__ == '__main__':
+ unittest.main()
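
The new events test only needs the two call forms it checks; a minimal sketch:

    from pyspedas.mms.mms_events import mms_brst_events
    mms_brst_events(trange=['2015-10-16', '2015-10-17'])  # with a time range
    mms_brst_events(reload=True)  # with the reload option
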
diff --git a/pyspedas/mms/tests/feeps.py b/pyspedas/mms/tests/feeps.py
index 80919b93..40e6957c 100644
--- a/pyspedas/mms/tests/feeps.py
+++ b/pyspedas/mms/tests/feeps.py
@@ -2,10 +2,21 @@
from ..feeps.mms_read_feeps_sector_masks_csv import mms_read_feeps_sector_masks_csv
from pyspedas import mms_load_feeps, mms_feeps_pad
from pyspedas.mms.feeps.mms_feeps_gpd import mms_feeps_gpd
-from ...utilities.data_exists import data_exists
-from pytplot import del_data
+from pytplot import del_data, tplot, data_exists, get
+
class FEEPSTestCases(unittest.TestCase):
+ def test_time_clip_regression(self):
+ # regression test for time clipping bug with spin-averaged spectra
+ mms_load_feeps(trange=['2015-12-15/10:00', '2015-12-15/12:00'], time_clip=True)
+ data = get('mms1_epd_feeps_srvy_l2_electron_intensity_omni_spin')
+ self.assertTrue(data.y[-1, :].sum() != 0.0)
+
+ def test_feeps_sitl(self):
+ mms_load_feeps(datatype='electron', trange=['2016-11-23', '2016-11-24'], data_rate='srvy', probe=4,
+ level='sitl')
+ self.assertTrue(data_exists('mms4_epd_feeps_srvy_sitl_electron_intensity_omni'))
+
def test_feeps_pad_regression(self):
"""
This is a regression test for a bug caused by the v7 of the FEEPS CDF files
@@ -14,6 +25,7 @@ def test_feeps_pad_regression(self):
mms_load_feeps(datatype='electron', trange=['2016-11-23', '2016-11-24'], data_rate='srvy', probe=4)
mms_feeps_pad(probe=4)
self.assertTrue(data_exists('mms4_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad'))
+ tplot('mms4_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad', display=False)
del_data('*')
def test_gyrophase_angles(self):
@@ -22,30 +34,49 @@ def test_gyrophase_angles(self):
self.assertTrue(data_exists('mms3_epd_feeps_brst_l2_electron_intensity_61-77keV_gpd'))
mms_feeps_gpd(probe='3', data_rate='brst')
self.assertTrue(data_exists('mms3_epd_feeps_brst_l2_electron_intensity_50-500keV_gpd'))
+ tplot(['mms3_epd_feeps_brst_l2_electron_intensity_61-77keV_gpd',
+ 'mms3_epd_feeps_brst_l2_electron_intensity_50-500keV_gpd'], display=False)
def test_pad_ions_brst(self):
mms_load_feeps(probe=4, data_rate='brst', datatype='ion', trange=['2015-10-01/10:48:16', '2015-10-01/10:49:16'])
mms_feeps_pad(probe=4, data_rate='brst', datatype='ion', angles_from_bfield=True)
self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_ion_intensity_70-600keV_pad'))
self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_ion_intensity_70-600keV_pad_spin'))
+ tplot(['mms4_epd_feeps_brst_l2_ion_intensity_70-600keV_pad',
+ 'mms4_epd_feeps_brst_l2_ion_intensity_70-600keV_pad_spin'], display=False)
def test_pad_ions_srvy(self):
mms_load_feeps(probe=4, datatype='ion', trange=['2015-10-01/10:48:16', '2015-10-01/10:49:16'])
mms_feeps_pad(probe=4, datatype='ion')
self.assertTrue(data_exists('mms4_epd_feeps_srvy_l2_ion_intensity_70-600keV_pad'))
self.assertTrue(data_exists('mms4_epd_feeps_srvy_l2_ion_intensity_70-600keV_pad_spin'))
+ tplot(['mms4_epd_feeps_srvy_l2_ion_intensity_70-600keV_pad',
+ 'mms4_epd_feeps_srvy_l2_ion_intensity_70-600keV_pad_spin'], display=False)
def test_pad_electrons_srvy(self):
mms_load_feeps()
mms_feeps_pad()
self.assertTrue(data_exists('mms1_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad_spin'))
self.assertTrue(data_exists('mms1_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad'))
+ tplot(['mms1_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad',
+ 'mms1_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad_spin'], display=False)
def test_pad_electrons_srvy_probe(self):
mms_load_feeps(probe=4)
mms_feeps_pad(probe=4)
self.assertTrue(data_exists('mms4_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad_spin'))
self.assertTrue(data_exists('mms4_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad'))
+ tplot(['mms4_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad',
+ 'mms4_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad_spin'], display=False)
+
+ def test_electron_srvy_after_aug17(self):
+ # there's a different set of active eyes after 16 August 2017
+ # this test executes that code
+ mms_load_feeps(probe=4, trange=['2017-12-01', '2017-12-02'])
+ self.assertTrue(data_exists('mms4_epd_feeps_srvy_l2_electron_intensity_omni'))
+ self.assertTrue(data_exists('mms4_epd_feeps_srvy_l2_electron_intensity_omni_spin'))
+ tplot(['mms4_epd_feeps_srvy_l2_electron_intensity_omni',
+ 'mms4_epd_feeps_srvy_l2_electron_intensity_omni_spin'], display=False)
def test_sector_masks(self):
d = mms_read_feeps_sector_masks_csv(['2015-08-01', '2015-08-02'])
@@ -71,5 +102,6 @@ def test_sector_masks(self):
self.assertTrue(d['mms2imaskt9'] == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 53, 54, 60, 61, 62, 63])
self.assertTrue(d['mms1imaskb6'] == [40, 41, 42, 49, 50, 51, 52, 53, 54, 57, 58])
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
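
A minimal sketch of the pitch-angle-distribution workflow these FEEPS tests repeat (parameters mirror test_pad_electrons_srvy_probe):

    from pyspedas import mms_load_feeps, mms_feeps_pad
    mms_load_feeps(probe=4)
    # -> mms4_epd_feeps_srvy_l2_electron_intensity_70-600keV_pad (and _pad_spin)
    mms_feeps_pad(probe=4)
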
diff --git a/pyspedas/mms/tests/fpi_tests.py b/pyspedas/mms/tests/fpi_tests.py
index ae16ea56..77257c8d 100644
--- a/pyspedas/mms/tests/fpi_tests.py
+++ b/pyspedas/mms/tests/fpi_tests.py
@@ -1,11 +1,121 @@
+import numpy as np
import pyspedas
+from pyspedas import mms_load_fpi
import unittest
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists, get_data
from pyspedas.mms.fpi.mms_fpi_split_tensor import mms_fpi_split_tensor
+from pyspedas.mms.fpi.mms_fpi_ang_ang import mms_fpi_ang_ang
+from pyspedas.mms.fpi.mms_get_fpi_dist import mms_get_fpi_dist
+from pyspedas.mms.fpi.mms_pad_fpi import mms_pad_fpi
class FPITestCases(unittest.TestCase):
+ def test_load_default_data(self):
+ data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], available=True)
+ data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
+ self.assertTrue(data_exists('mms1_dis_energyspectr_omni_fast'))
+
+ def test_load_spdf_data(self):
+ data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], spdf=True)
+ self.assertTrue(data_exists('mms1_dis_energyspectr_omni_fast'))
+
+ def test_load_small_brst_interval(self):
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype=['dis-moms', 'dis-dist'], time_clip=True)
+ self.assertTrue(data_exists('mms1_dis_energyspectr_omni_brst'))
+
+ def test_load_rename_bars(self):
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='des-dist')
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='dis-dist')
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='des-moms')
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='dis-moms')
+ self.assertTrue(data_exists('mms1_dis_compressionloss_brst_moms'))
+ self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms'))
+ self.assertTrue(data_exists('mms1_des_errorflags_brst_moms'))
+ self.assertTrue(data_exists('mms1_des_compressionloss_brst_moms'))
+ self.assertTrue(data_exists('mms1_des_errorflags_brst_dist'))
+ self.assertTrue(data_exists('mms1_des_compressionloss_brst_dist'))
+ self.assertTrue(data_exists('mms1_dis_errorflags_brst_dist'))
+ self.assertTrue(data_exists('mms1_dis_compressionloss_brst_dist'))
+
+ def test_center_fast_ion_data(self):
+ data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
+ centered = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], center_measurement=True, suffix='_centered')
+
+ t, d = get_data('mms1_dis_bulkv_gse_fast')
+ c, d = get_data('mms1_dis_bulkv_gse_fast_centered')
+ self.assertTrue(np.round(c[0]-t[0], decimals=3) == 2.25)
+
+ def test_center_fast_electron_data(self):
+ data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
+ centered = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], center_measurement=True, suffix='_centered')
+
+ t, d = get_data('mms1_des_bulkv_gse_fast')
+ c, d = get_data('mms1_des_bulkv_gse_fast_centered')
+ self.assertTrue(np.round(c[0]-t[0], decimals=3) == 2.25)
+
+ def test_center_brst_ion_data(self):
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst')
+ centered = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', center_measurement=True, suffix='_centered')
+
+ t, d = get_data('mms1_dis_bulkv_gse_brst')
+ c, d = get_data('mms1_dis_bulkv_gse_brst_centered')
+ self.assertTrue(np.round(c[0]-t[0], decimals=3) == 0.075)
+
+ def test_center_brst_electron_data(self):
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst')
+ centered = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', center_measurement=True, suffix='_centered')
+
+ t, d = get_data('mms1_des_bulkv_gse_brst')
+ c, d = get_data('mms1_des_bulkv_gse_brst_centered')
+ self.assertTrue(np.round(c[0]-t[0], decimals=3) == 0.015)
+
+ def test_errorflag_compression_bars(self):
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype=['des-dist', 'des-moms'])
+ data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype=['dis-dist', 'dis-moms'])
+ # mms_fpi_make_errorflagbars('mms1_des_errorflags_brst_moms', level='l2')
+ # mms_fpi_make_errorflagbars('mms1_dis_errorflags_brst_moms', level='l2')
+ # mms_fpi_make_errorflagbars('mms1_des_errorflags_brst_dist', level='l2')
+ # mms_fpi_make_errorflagbars('mms1_dis_errorflags_brst_dist', level='l2')
+ # mms_fpi_make_compressionlossbars('mms1_des_compressionloss_brst_moms')
+ # mms_fpi_make_compressionlossbars('mms1_dis_compressionloss_brst_moms')
+ # mms_fpi_make_compressionlossbars('mms1_des_compressionloss_brst_dist')
+ # mms_fpi_make_compressionlossbars('mms1_dis_compressionloss_brst_dist')
+ self.assertTrue(data_exists('mms1_des_errorflags_brst_moms_flagbars_full'))
+ self.assertTrue(data_exists('mms1_des_errorflags_brst_moms_flagbars_main'))
+ self.assertTrue(data_exists('mms1_des_errorflags_brst_moms_flagbars_mini'))
+ self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms_flagbars_full'))
+ self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms_flagbars_main'))
+ self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms_flagbars_mini'))
+ self.assertTrue(data_exists('mms1_des_errorflags_brst_dist_flagbars_dist'))
+ self.assertTrue(data_exists('mms1_dis_errorflags_brst_dist_flagbars_dist'))
+ self.assertTrue(data_exists('mms1_des_compressionloss_brst_moms_flagbars'))
+ self.assertTrue(data_exists('mms1_dis_compressionloss_brst_moms_flagbars'))
+ self.assertTrue(data_exists('mms1_des_compressionloss_brst_dist_flagbars'))
+ self.assertTrue(data_exists('mms1_dis_compressionloss_brst_dist_flagbars'))
+
+ def test_angle_angle(self):
+ mms_fpi_ang_ang('2015-10-16/13:06:30', data_rate='brst', save_png='mms1_fpi_ang_ang_brst', display=False)
+ mms_fpi_ang_ang('2015-10-16/13:06:30', save_jpeg='mms1_fpi_ang_ang', display=False)
+ mms_fpi_ang_ang('2015-10-16/13:06:30', probe='4', save_svg='mms4_fpi_ang_ang', display=False)
+ mms_fpi_ang_ang('2015-10-16/13:06:30', probe='4', save_eps='mms4_fpi_ang_ang_viridis', cmap='viridis', display=False)
+
+ def test_pad(self):
+ trange = ['2015-10-16/13:06:29', '2015-10-16/13:06:32']
+ pyspedas.mms.fpi(trange=trange, data_rate='brst', datatype=['dis-dist', 'des-dist', 'dis-moms'], time_clip=True)
+ pyspedas.mms.fgm(trange=trange, data_rate='brst')
+ dists = mms_get_fpi_dist('mms1_dis_dist_brst')
+ dists_e = mms_get_fpi_dist('mms1_des_dist_brst')
+ pa_dist = mms_pad_fpi(dists, trange=trange, mag_data='mms1_fgm_b_gse_brst_l2_bvec')
+ pa_dist = mms_pad_fpi(dists_e, trange=trange, mag_data='mms1_fgm_b_gse_brst_l2_bvec')
+ pa_dist = mms_pad_fpi(dists, time='2015-10-16/13:06:30', units='eflux', mag_data='mms1_fgm_b_gse_brst_l2_bvec')
+ pa_dist = mms_pad_fpi(dists,
+ subtract_bulk=True,
+ time='2015-10-16/13:06:30',
+ units='eflux',
+ mag_data='mms1_fgm_b_gse_brst_l2_bvec',
+ vel_data='mms1_dis_bulkv_gse_brst')
+
def test_split_tensors(self):
data = pyspedas.mms.fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'],
data_rate='brst',
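
The new test_pad case boils down to this distribution-to-PAD pipeline; all names and parameters below are taken from the test itself:

    import pyspedas
    from pyspedas.mms.fpi.mms_get_fpi_dist import mms_get_fpi_dist
    from pyspedas.mms.fpi.mms_pad_fpi import mms_pad_fpi
    trange = ['2015-10-16/13:06:29', '2015-10-16/13:06:32']
    pyspedas.mms.fpi(trange=trange, data_rate='brst', datatype=['dis-dist', 'dis-moms'], time_clip=True)
    pyspedas.mms.fgm(trange=trange, data_rate='brst')
    dists = mms_get_fpi_dist('mms1_dis_dist_brst')
    pa_dist = mms_pad_fpi(dists, trange=trange, mag_data='mms1_fgm_b_gse_brst_l2_bvec')
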
diff --git a/pyspedas/mms/tests/load_routine_tests.py b/pyspedas/mms/tests/load_routine_tests.py
index e203a8f1..61deb173 100644
--- a/pyspedas/mms/tests/load_routine_tests.py
+++ b/pyspedas/mms/tests/load_routine_tests.py
@@ -1,16 +1,23 @@
import unittest
import numpy as np
-
-from pyspedas.mms import mms_load_state, mms_load_mec, mms_load_fgm, mms_load_scm, mms_load_fpi, mms_load_hpca, mms_load_eis, mms_load_feeps, mms_load_edp, mms_load_edi, mms_load_aspoc, mms_load_dsp
-from pyspedas.utilities.data_exists import data_exists
+from pyspedas.mms import mms_load_state, mms_load_mec, mms_load_fgm, mms_load_scm, mms_load_fpi, mms_load_hpca, mms_load_feeps, mms_load_edp, mms_load_edi, mms_load_aspoc, mms_load_dsp
+from pytplot import data_exists
from pyspedas.mms.hpca.mms_hpca_calc_anodes import mms_hpca_calc_anodes
from pyspedas.mms.hpca.mms_hpca_spin_sum import mms_hpca_spin_sum
-from pyspedas.mms.fpi.mms_fpi_make_errorflagbars import mms_fpi_make_errorflagbars
-from pyspedas.mms.fpi.mms_fpi_make_compressionlossbars import mms_fpi_make_compressionlossbars
+from pyspedas.mms.hpca.mms_get_hpca_info import mms_get_hpca_info
+from pyspedas import tdpwrspc
+import pyspedas
+from pytplot import get_data, del_data, tplot
-from pytplot import get_data, del_data
-from pyspedas import tdpwrspc
+class FSMLoadTestCases(unittest.TestCase):
+ def test_load_basic(self):
+ files = pyspedas.mms.fsm(trange=['2015-10-16/06:00', '2015-10-16/06:05'], available=True)
+ fsm = pyspedas.mms.fsm(trange=['2015-10-16/06:00', '2015-10-16/06:05'])
+ self.assertTrue(data_exists('mms1_fsm_b_mag_brst_l3'))
+ self.assertTrue(data_exists('mms1_fsm_b_gse_brst_l3'))
+ tplot(['mms1_fsm_b_mag_brst_l3', 'mms1_fsm_b_gse_brst_l3'], display=False)
+
class StateLoadTestCases(unittest.TestCase):
def test_load_eph_no_update(self):
@@ -24,11 +31,14 @@ def test_load_eph_data(self):
data = mms_load_state(datatypes=['pos', 'vel'])
self.assertTrue(data_exists('mms1_defeph_pos'))
self.assertTrue(data_exists('mms1_defeph_vel'))
+ tplot(['mms1_defeph_pos', 'mms1_defeph_vel'], display=False)
def test_load_att_data(self):
data = mms_load_state(trange=['2015-10-16', '2015-10-16/06:00'], datatypes=['spinras', 'spindec'])
self.assertTrue(data_exists('mms1_defatt_spinras'))
self.assertTrue(data_exists('mms1_defatt_spindec'))
+ tplot(['mms1_defatt_spinras', 'mms1_defatt_spindec'], display=False)
+
############### DSP ###############
class DSPLoadTestCases(unittest.TestCase):
@@ -40,154 +50,76 @@ def test_load_epsd_bpsd_data(self):
data = mms_load_dsp(trange=['2015-08-01','2015-08-02'], datatype=['epsd', 'bpsd'], level='l2', data_rate='fast')
self.assertTrue(data_exists('mms1_dsp_epsd_omni'))
self.assertTrue(data_exists('mms1_dsp_bpsd_omni'))
+ tplot(['mms1_dsp_epsd_omni', 'mms1_dsp_bpsd_omni'], display=False)
def test_load_bpsd_data(self):
+ data = mms_load_dsp(trange=['2015-10-16','2015-10-17'], datatype='bpsd', level='l2', data_rate='fast', available=True)
data = mms_load_dsp(trange=['2015-10-16','2015-10-17'], datatype='bpsd', level='l2', data_rate='fast')
self.assertTrue(data_exists('mms1_dsp_bpsd_omni_fast_l2'))
+ tplot(['mms1_dsp_bpsd_omni_fast_l2'], display=False)
def test_load_epsd_spdf(self):
data = mms_load_dsp(trange=['2015-08-01','2015-08-02'], datatype='epsd', level='l2', data_rate='fast', spdf=True)
self.assertTrue(data_exists('mms1_dsp_epsd_omni'))
+ tplot(['mms1_dsp_epsd_omni'], display=False)
def test_load_epsd_suffix(self):
data = mms_load_dsp(trange=['2015-08-01','2015-08-02'], datatype='epsd', level='l2', data_rate='fast', suffix='_test')
self.assertTrue(data_exists('mms1_dsp_epsd_omni_test'))
+ tplot(['mms1_dsp_epsd_omni_test'], display=False)
-############### FEEPS ###############
-class FEEPSLoadTestCases(unittest.TestCase):
- def test_load_default_data(self):
- data = mms_load_feeps(trange=['2015-10-16', '2015-10-16/01:00'])
- self.assertTrue(data_exists('mms1_epd_feeps_srvy_l2_electron_intensity_omni'))
- self.assertTrue(data_exists('mms1_epd_feeps_srvy_l2_electron_intensity_omni_spin'))
-
- def test_load_spdf_data(self):
- del_data('*')
- data = mms_load_feeps(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', spdf=True)
- self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni'))
- self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_spin'))
-
- def test_load_suffix(self):
- data = mms_load_feeps(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', suffix='_test')
- self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_test'))
- self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_spin_test'))
-
- def test_load_brst_ion_data(self):
- del_data('*')
- data = mms_load_feeps(probe=4, data_rate='brst', datatype='ion', trange=['2015-10-01/10:48:16', '2015-10-01/10:49:16'])
- self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_ion_intensity_omni'))
- self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_ion_intensity_omni_spin'))
-
- def test_load_brst_multi_probe(self):
- del_data('*')
- data = mms_load_feeps(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', probe=[1, 2, 3, 4])
- self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni'))
- self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_spin'))
- self.assertTrue(data_exists('mms2_epd_feeps_brst_l2_electron_intensity_omni'))
- self.assertTrue(data_exists('mms2_epd_feeps_brst_l2_electron_intensity_omni_spin'))
- self.assertTrue(data_exists('mms3_epd_feeps_brst_l2_electron_intensity_omni'))
- self.assertTrue(data_exists('mms3_epd_feeps_brst_l2_electron_intensity_omni_spin'))
- self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_electron_intensity_omni'))
- self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_electron_intensity_omni_spin'))
-
-############### FPI ###############
-class FPILoadTestCases(unittest.TestCase):
- def test_load_default_data(self):
- data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
- self.assertTrue(data_exists('mms1_dis_energyspectr_omni_fast'))
-
- def test_load_spdf_data(self):
- data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], spdf=True)
- self.assertTrue(data_exists('mms1_dis_energyspectr_omni_fast'))
-
- def test_load_small_brst_interval(self):
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype=['dis-moms', 'dis-dist'], time_clip=True)
- self.assertTrue(data_exists('mms1_dis_energyspectr_omni_brst'))
-
- def test_load_rename_bars(self):
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='des-dist')
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='dis-dist')
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='des-moms')
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='dis-moms')
- self.assertTrue(data_exists('mms1_dis_compressionloss_brst_moms'))
- self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms'))
- self.assertTrue(data_exists('mms1_des_errorflags_brst_moms'))
- self.assertTrue(data_exists('mms1_des_compressionloss_brst_moms'))
- self.assertTrue(data_exists('mms1_des_errorflags_brst_dist'))
- self.assertTrue(data_exists('mms1_des_compressionloss_brst_dist'))
- self.assertTrue(data_exists('mms1_dis_errorflags_brst_dist'))
- self.assertTrue(data_exists('mms1_dis_compressionloss_brst_dist'))
-
- def test_center_fast_ion_data_notplot(self):
- data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], notplot=True)
- centered = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], center_measurement=True, suffix='_centered', notplot=True)
-
- self.assertTrue(np.round(centered['mms1_dis_bulkv_gse_fast_centered']['x'][0]-data['mms1_dis_bulkv_gse_fast']['x'][0], decimals=3) == 2.25)
-
- def test_center_fast_ion_data(self):
- data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
- centered = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], center_measurement=True, suffix='_centered')
-
- t, d = get_data('mms1_dis_bulkv_gse_fast')
- c, d = get_data('mms1_dis_bulkv_gse_fast_centered')
- self.assertTrue(np.round(c[0]-t[0], decimals=3) == 2.25)
- def test_center_fast_electron_data(self):
- data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
- centered = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], center_measurement=True, suffix='_centered')
-
- t, d = get_data('mms1_des_bulkv_gse_fast')
- c, d = get_data('mms1_des_bulkv_gse_fast_centered')
- self.assertTrue(np.round(c[0]-t[0], decimals=3) == 2.25)
+############### FEEPS ###############
+# class FEEPSLoadTestCases(unittest.TestCase):
+# def test_load_default_data(self):
+# data = mms_load_feeps(trange=['2015-10-16', '2015-10-16/01:00'], available=True)
+# data = mms_load_feeps(trange=['2015-10-16', '2015-10-16/01:00'])
+# self.assertTrue(data_exists('mms1_epd_feeps_srvy_l2_electron_intensity_omni'))
+# self.assertTrue(data_exists('mms1_epd_feeps_srvy_l2_electron_intensity_omni_spin'))
+#
+# def test_load_spdf_data(self):
+# del_data('*')
+# data = mms_load_feeps(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', spdf=True)
+# self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni'))
+# self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_spin'))
+#
+# def test_load_suffix(self):
+# data = mms_load_feeps(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', suffix='_test')
+# self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_test'))
+# self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_spin_test'))
+#
+# def test_load_brst_ion_data(self):
+# del_data('*')
+# data = mms_load_feeps(probe=4, data_rate='brst', datatype='ion', trange=['2015-10-01/10:48:16', '2015-10-01/10:49:16'])
+# self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_ion_intensity_omni'))
+# self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_ion_intensity_omni_spin'))
+#
+# def test_load_brst_multi_probe(self):
+# del_data('*')
+# data = mms_load_feeps(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', probe=[1, 2, 3, 4])
+# self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni'))
+# self.assertTrue(data_exists('mms1_epd_feeps_brst_l2_electron_intensity_omni_spin'))
+# self.assertTrue(data_exists('mms2_epd_feeps_brst_l2_electron_intensity_omni'))
+# self.assertTrue(data_exists('mms2_epd_feeps_brst_l2_electron_intensity_omni_spin'))
+# self.assertTrue(data_exists('mms3_epd_feeps_brst_l2_electron_intensity_omni'))
+# self.assertTrue(data_exists('mms3_epd_feeps_brst_l2_electron_intensity_omni_spin'))
+# self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_electron_intensity_omni'))
+# self.assertTrue(data_exists('mms4_epd_feeps_brst_l2_electron_intensity_omni_spin'))
- def test_center_brst_ion_data(self):
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst')
- centered = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', center_measurement=True, suffix='_centered')
-
- t, d = get_data('mms1_dis_bulkv_gse_brst')
- c, d = get_data('mms1_dis_bulkv_gse_brst_centered')
- self.assertTrue(np.round(c[0]-t[0], decimals=3) == 0.075)
- def test_center_brst_electron_data(self):
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst')
- centered = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', center_measurement=True, suffix='_centered')
-
- t, d = get_data('mms1_des_bulkv_gse_brst')
- c, d = get_data('mms1_des_bulkv_gse_brst_centered')
- self.assertTrue(np.round(c[0]-t[0], decimals=3) == 0.015)
-
- def test_errorflag_compression_bars(self):
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype=['des-dist', 'des-moms'])
- data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype=['dis-dist', 'dis-moms'])
- # mms_fpi_make_errorflagbars('mms1_des_errorflags_brst_moms', level='l2')
- # mms_fpi_make_errorflagbars('mms1_dis_errorflags_brst_moms', level='l2')
- # mms_fpi_make_errorflagbars('mms1_des_errorflags_brst_dist', level='l2')
- # mms_fpi_make_errorflagbars('mms1_dis_errorflags_brst_dist', level='l2')
- # mms_fpi_make_compressionlossbars('mms1_des_compressionloss_brst_moms')
- # mms_fpi_make_compressionlossbars('mms1_dis_compressionloss_brst_moms')
- # mms_fpi_make_compressionlossbars('mms1_des_compressionloss_brst_dist')
- # mms_fpi_make_compressionlossbars('mms1_dis_compressionloss_brst_dist')
- self.assertTrue(data_exists('mms1_des_errorflags_brst_moms_flagbars_full'))
- self.assertTrue(data_exists('mms1_des_errorflags_brst_moms_flagbars_main'))
- self.assertTrue(data_exists('mms1_des_errorflags_brst_moms_flagbars_mini'))
- self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms_flagbars_full'))
- self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms_flagbars_main'))
- self.assertTrue(data_exists('mms1_dis_errorflags_brst_moms_flagbars_mini'))
- self.assertTrue(data_exists('mms1_des_errorflags_brst_dist_flagbars_dist'))
- self.assertTrue(data_exists('mms1_dis_errorflags_brst_dist_flagbars_dist'))
- self.assertTrue(data_exists('mms1_des_compressionloss_brst_moms_flagbars'))
- self.assertTrue(data_exists('mms1_dis_compressionloss_brst_moms_flagbars'))
- self.assertTrue(data_exists('mms1_des_compressionloss_brst_dist_flagbars'))
- self.assertTrue(data_exists('mms1_dis_compressionloss_brst_dist_flagbars'))
############### HPCA ###############
class HPCALoadTestCases(unittest.TestCase):
def test_load_default_data(self):
+ data = mms_load_hpca(trange=['2015-10-16', '2015-10-16/01:00'], available=True)
data = mms_load_hpca(trange=['2015-10-16', '2015-10-16/01:00'])
self.assertTrue(data_exists('mms1_hpca_hplus_number_density'))
+ tplot(['mms1_hpca_hplus_number_density'], display=False)
def test_load_spdf_data(self):
data = mms_load_hpca(trange=['2015-10-16', '2015-10-16/01:00'], spdf=True)
self.assertTrue(data_exists('mms1_hpca_hplus_number_density'))
+ tplot(['mms1_hpca_hplus_number_density'], display=False)
def test_load_ion_omni_suffix(self):
del_data('*')
@@ -195,16 +127,21 @@ def test_load_ion_omni_suffix(self):
mms_hpca_calc_anodes(fov=[0, 360], probe=2, suffix='_brst')
mms_hpca_spin_sum(probe=2, suffix='_brst', avg=True)
self.assertTrue(data_exists('mms2_hpca_hplus_flux_brst_elev_0-360_spin'))
+ tplot(['mms2_hpca_hplus_flux_brst_elev_0-360_spin'], display=False)
def test_load_ion_omni(self):
del_data('*')
- data = mms_load_hpca(trange=['2016-10-16/5:00', '2016-10-16/6:00'], datatype='ion')
+ data = mms_load_hpca(trange=['2016-10-16', '2016-10-16/6:00'], datatype='ion')
mms_hpca_calc_anodes(fov=[0, 360], probe='1')
mms_hpca_spin_sum()
self.assertTrue(data_exists('mms1_hpca_hplus_flux_elev_0-360_spin'))
self.assertTrue(data_exists('mms1_hpca_heplus_flux_elev_0-360_spin'))
self.assertTrue(data_exists('mms1_hpca_heplusplus_flux_elev_0-360_spin'))
self.assertTrue(data_exists('mms1_hpca_oplus_flux_elev_0-360_spin'))
+ tplot(['mms1_hpca_hplus_flux_elev_0-360_spin',
+ 'mms1_hpca_heplus_flux_elev_0-360_spin',
+ 'mms1_hpca_heplusplus_flux_elev_0-360_spin',
+ 'mms1_hpca_oplus_flux_elev_0-360_spin'], display=False)
def test_center_fast_moments_data(self):
data = mms_load_hpca(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
@@ -222,25 +159,37 @@ def test_center_brst_moments_data(self):
c, d = get_data('mms1_hpca_hplus_ion_bulk_velocity_centered')
self.assertTrue(np.round(c[0]-t[0], decimals=3) == 5.0)
+ def test_hpca_info(self):
+ info = mms_get_hpca_info()
+ self.assertTrue(list(info.keys()) == ['elevation', 't_spin', 't_sweep', 'azimuth_energy_offset'])
+
+
############### EDI ###############
class EDILoadTestCases(unittest.TestCase):
def test_load_default_data(self):
+ data = mms_load_edi(trange=['2016-10-17/13:00', '2016-10-17/14:00'], available=True)
data = mms_load_edi(trange=['2016-10-17/13:00', '2016-10-17/14:00'])
self.assertTrue(data_exists('mms1_edi_e_gse_srvy_l2'))
+ tplot(['mms1_edi_e_gse_srvy_l2'], display=False)
def test_load_spdf_data(self):
data = mms_load_edi(trange=['2016-10-17/13:00', '2016-10-17/14:00'], spdf=True)
self.assertTrue(data_exists('mms1_edi_e_gse_srvy_l2'))
+ tplot(['mms1_edi_e_gse_srvy_l2'], display=False)
def test_load_suffix(self):
data = mms_load_edi(trange=['2016-10-17/13:00', '2016-10-17/14:00'], suffix='_test')
self.assertTrue(data_exists('mms1_edi_e_gse_srvy_l2_test'))
+ tplot(['mms1_edi_e_gse_srvy_l2_test'], display=False)
+
############### ASPOC ###############
class ASPOCLoadTestCases(unittest.TestCase):
def test_load_default_data(self):
+ data = mms_load_aspoc(trange=['2015-10-16', '2015-10-16/01:00'], available=True)
data = mms_load_aspoc(trange=['2015-10-16', '2015-10-16/01:00'])
self.assertTrue(data_exists('mms1_aspoc_ionc_l2'))
+ tplot(['mms1_aspoc_ionc_l2'], display=False)
def test_load_spdf_data(self):
data = mms_load_aspoc(trange=['2015-10-16', '2015-10-16/01:00'], spdf=True)
@@ -250,19 +199,24 @@ def test_load_suffix(self):
data = mms_load_aspoc(trange=['2015-10-16', '2015-10-16/01:00'], suffix='_test')
self.assertTrue(data_exists('mms1_aspoc_ionc_l2_test'))
+
############### EDP ###############
class EDPLoadTestCases(unittest.TestCase):
def test_load_default_data(self):
+ data = mms_load_edp(trange=['2015-10-16', '2015-10-16/01:00'], available=True)
data = mms_load_edp(trange=['2015-10-16', '2015-10-16/01:00'])
self.assertTrue(data_exists('mms1_edp_dce_gse_fast_l2'))
+ tplot(['mms1_edp_dce_gse_fast_l2'], display=False)
def test_load_hfesp_data(self):
data = mms_load_edp(trange=['2015-10-16', '2015-10-16/01:00'], datatype='hfesp', data_rate='srvy')
self.assertTrue(data_exists('mms1_edp_hfesp_srvy_l2'))
+ tplot(['mms1_edp_hfesp_srvy_l2'], display=False)
def test_load_spdf_data(self):
data = mms_load_edp(trange=['2015-10-16', '2015-10-16/01:00'], spdf=True)
self.assertTrue(data_exists('mms1_edp_dce_gse_fast_l2'))
+ tplot(['mms1_edp_dce_gse_fast_l2'], display=False)
def test_load_suffix(self):
data = mms_load_edp(trange=['2015-10-16', '2015-10-16/01:00'], suffix='_test')
@@ -278,6 +232,8 @@ def test_load_suffix(self):
def test_load_brst_data(self):
data = mms_load_edp(data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:10'])
self.assertTrue(data_exists('mms1_edp_dce_gse_brst_l2'))
+ tplot(['mms1_edp_dce_gse_brst_l2'], display=False)
+
############### FGM ###############
class FGMLoadTestCases(unittest.TestCase):
@@ -290,10 +246,12 @@ def test_regression_multi_imports_spdf(self):
self.assertTrue(d1.shape == d2.shape)
def test_load_default_data(self):
+ data = mms_load_fgm(trange=['2015-10-16', '2015-10-16/01:00'], available=True)
data = mms_load_fgm(trange=['2015-10-16', '2015-10-16/01:00'])
self.assertTrue(data_exists('mms1_fgm_b_gse_srvy_l2'))
self.assertTrue(data_exists('Epoch'))
self.assertTrue(data_exists('Epoch_state'))
+ tplot(['mms1_fgm_b_gse_srvy_l2'], display=False)
def test_load_spdf_data(self):
data = mms_load_fgm(data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:10'], spdf=True)
@@ -313,6 +271,7 @@ def test_load_multiple_sc(self):
def test_load_brst_data(self):
data = mms_load_fgm(data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:10'])
self.assertTrue(data_exists('mms1_fgm_b_gse_brst_l2'))
+ tplot(['mms1_fgm_b_gse_brst_l2'], display=False)
def test_load_data_no_update(self):
data = mms_load_fgm(trange=['2015-10-16', '2015-10-16/01:00']) # make sure the files exist locally
@@ -320,11 +279,14 @@ def test_load_data_no_update(self):
data = mms_load_fgm(trange=['2015-10-16', '2015-10-16/01:00'], no_update=True) # load the file from the local cache
self.assertTrue(data_exists('mms1_fgm_b_gse_srvy_l2'))
-############### MEC ###############
+
+############### MEC ###############
class MECLoadTestCases(unittest.TestCase):
def test_load_default_data(self):
+ data = mms_load_mec(trange=['2015-10-16', '2015-10-16/01:00'], available=True)
data = mms_load_mec(trange=['2015-10-16', '2015-10-16/01:00'])
self.assertTrue(data_exists('mms1_mec_r_sm'))
+ tplot(['mms1_mec_r_sm'], display=False)
def test_load_spdf_data(self):
data = mms_load_mec(trange=['2015-10-16', '2015-10-16/01:00'], spdf=True)
@@ -334,19 +296,29 @@ def test_load_suffix(self):
data = mms_load_mec(trange=['2015-10-16', '2015-10-16/01:00'], suffix='_test')
self.assertTrue(data_exists('mms1_mec_r_sm_test'))
+
class SCMLoadTestCases(unittest.TestCase):
def test_brst_dpwrspc_data(self):
data = mms_load_scm(probe=4, data_rate='brst', datatype='scb', trange=['2015-10-01/10:48:16', '2015-10-01/10:49:16'])
- tdpwrspc('mms4_scm_acb_gse_scb_brst_l2')
+ tdpwrspc('mms4_scm_acb_gse_scb_brst_l2', notmvariance=True)
self.assertTrue(data_exists('mms4_scm_acb_gse_scb_brst_l2'))
self.assertTrue(data_exists('mms4_scm_acb_gse_scb_brst_l2_x_dpwrspc'))
self.assertTrue(data_exists('mms4_scm_acb_gse_scb_brst_l2_y_dpwrspc'))
self.assertTrue(data_exists('mms4_scm_acb_gse_scb_brst_l2_z_dpwrspc'))
+ tplot(['mms4_scm_acb_gse_scb_brst_l2',
+ 'mms4_scm_acb_gse_scb_brst_l2_x_dpwrspc',
+ 'mms4_scm_acb_gse_scb_brst_l2_y_dpwrspc',
+ 'mms4_scm_acb_gse_scb_brst_l2_z_dpwrspc'], display=False)
def test_load_default_data(self):
+ data = mms_load_scm(trange=['2015-10-16', '2015-10-16/01:00'], available=True)
data = mms_load_scm(trange=['2015-10-16', '2015-10-16/01:00'])
self.assertTrue(data_exists('mms1_scm_acb_gse_scsrvy_srvy_l2'))
+ def test_load_schb(self):
+ data = pyspedas.mms.scm(probe=4, data_rate='brst', datatype='schb', trange=['2015-10-01/10:48:16', '2015-10-01/10:49:16'])
+ self.assertTrue(data_exists('mms4_scm_acb_gse_schb_brst_l2'))
+
def test_load_suffix(self):
data = mms_load_scm(trange=['2015-10-16', '2015-10-16/01:00'], suffix='_test')
self.assertTrue(data_exists('mms1_scm_acb_gse_scsrvy_srvy_l2_test'))
@@ -362,5 +334,10 @@ def test_load_brst_data(self):
data = mms_load_scm(data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:10'], datatype='scb')
self.assertTrue(data_exists('mms1_scm_acb_gse_scb_brst_l2'))
+ def test_available(self):
+ files = mms_load_scm(data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:10'], datatype='scb', available=True)
+ self.assertTrue(len(files) == 2)
+
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
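
The SCM power-spectrum test relies on this tdpwrspc pattern; a minimal sketch, with the load call taken from test_brst_dpwrspc_data:

    from pyspedas import tdpwrspc
    from pyspedas.mms import mms_load_scm
    mms_load_scm(probe=4, data_rate='brst', datatype='scb',
                 trange=['2015-10-01/10:48:16', '2015-10-01/10:49:16'])
    # produces mms4_scm_acb_gse_scb_brst_l2_{x,y,z}_dpwrspc
    tdpwrspc('mms4_scm_acb_gse_scb_brst_l2', notmvariance=True)
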
diff --git a/pyspedas/mms/tests/mms_part_getspec.py b/pyspedas/mms/tests/mms_part_getspec.py
new file mode 100644
index 00000000..d318b04a
--- /dev/null
+++ b/pyspedas/mms/tests/mms_part_getspec.py
@@ -0,0 +1,120 @@
+import unittest
+from pyspedas.mms.particles.mms_part_getspec import mms_part_getspec
+from pytplot import data_exists
+
+
+class PGSTests(unittest.TestCase):
+ def test_pgs_errors(self):
+ # no trange specified
+ mms_part_getspec()
+ # unsupported instrument
+ mms_part_getspec(trange=['2015-10-16/13:06:00', '2015-10-16/13:06:10'], instrument='feeps')
+
+ def test_fpi_brst_fac_type(self):
+ mms_part_getspec(trange=['2015-10-16/13:06:00', '2015-10-16/13:06:10'],
+ data_rate='brst',
+ species='i',
+ fac_type='phigeo',
+ output='pa gyro moments')
+ self.assertTrue(data_exists('mms1_dis_dist_brst_density'))
+ mms_part_getspec(trange=['2015-10-16/13:06:00', '2015-10-16/13:06:10'],
+ data_rate='brst',
+ species='i',
+ fac_type='xgse',
+ output='pa gyro moments')
+ self.assertTrue(data_exists('mms1_dis_dist_brst_density'))
+
+ def test_fpi_brst_i(self):
+ mms_part_getspec(trange=['2015-10-16/13:06:00', '2015-10-16/13:06:10'],
+ data_rate='brst',
+ species='i',
+ output='energy theta phi pa gyro moments')
+ self.assertTrue(data_exists('mms1_dis_dist_brst_density'))
+ self.assertTrue(data_exists('mms1_dis_dist_brst_velocity'))
+ self.assertTrue(data_exists('mms1_dis_dist_brst_avgtemp'))
+ self.assertTrue(data_exists('mms1_dis_dist_brst_energy'))
+ self.assertTrue(data_exists('mms1_dis_dist_brst_theta'))
+ self.assertTrue(data_exists('mms1_dis_dist_brst_phi'))
+ self.assertTrue(data_exists('mms1_dis_dist_brst_pa'))
+ self.assertTrue(data_exists('mms1_dis_dist_brst_gyro'))
+
+ def test_fpi_disable_pe_corr(self):
+ mms_part_getspec(trange=['2015-10-16/13:06:07', '2015-10-16/13:06:08'],
+ data_rate='brst',
+ species='e',
+ output='energy',
+ disable_photoelectron_corrections=True)
+ self.assertTrue(data_exists('mms1_des_dist_brst_energy'))
+
+ def test_fpi_brst_e(self):
+ mms_part_getspec(trange=['2015-10-16/13:06:07', '2015-10-16/13:06:08'],
+ data_rate='brst',
+ species='e',
+ output='energy theta phi pa gyro moments')
+ self.assertTrue(data_exists('mms1_des_dist_brst_density'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_velocity'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_avgtemp'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_energy'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_theta'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_phi'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_pa'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_gyro'))
+
+ def test_fpi_brst_limits(self):
+ mms_part_getspec(trange=['2015-10-16/13:06:07', '2015-10-16/13:06:08'],
+ data_rate='brst',
+ species='e',
+ theta=[0, 90],
+ phi=[0, 100],
+ gyro=[0, 180],
+ pitch=[45, 75],
+ energy=[1000, 20000],
+ output='energy theta phi pa gyro moments')
+ self.assertTrue(data_exists('mms1_des_dist_brst_density'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_velocity'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_avgtemp'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_energy'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_theta'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_phi'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_pa'))
+ self.assertTrue(data_exists('mms1_des_dist_brst_gyro'))
+
+ def test_hpca_srvy_hplus(self):
+ mms_part_getspec(trange=['2015-10-16/13:05', '2015-10-16/13:10'],
+ #data_rate='brst',
+ species='hplus',
+ instrument='hpca',
+ output='energy theta phi pa gyro moments')
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_density'))
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_velocity'))
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_avgtemp'))
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_energy'))
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_theta'))
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_phi'))
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_pa'))
+ self.assertTrue(data_exists('mms1_hpca_hplus_phase_space_density_gyro'))
+
+ def test_hpca_srvy_oplus(self):
+ mms_part_getspec(trange=['2015-10-16/13:05', '2015-10-16/13:10'],
+ species='oplus',
+ instrument='hpca',
+ output='energy')
+ self.assertTrue(data_exists('mms1_hpca_oplus_phase_space_density_energy'))
+
+ def test_hpca_srvy_heplus(self):
+ mms_part_getspec(trange=['2015-10-16/13:05', '2015-10-16/13:10'],
+ species='heplus',
+ instrument='hpca',
+ output='energy')
+ self.assertTrue(data_exists('mms1_hpca_heplus_phase_space_density_energy'))
+
+ def test_hpca_srvy_heplusplus(self):
+ mms_part_getspec(trange=['2015-10-16/13:05', '2015-10-16/13:10'],
+ species='heplusplus',
+ instrument='hpca',
+ output='energy')
+ self.assertTrue(data_exists('mms1_hpca_heplusplus_phase_space_density_energy'))
+
+
+if __name__ == '__main__':
+ unittest.main()
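
All of the getspec cases above are variations on one call; a minimal sketch mirroring test_fpi_brst_i:

    from pyspedas.mms.particles.mms_part_getspec import mms_part_getspec
    mms_part_getspec(trange=['2015-10-16/13:06:00', '2015-10-16/13:06:10'],
                     data_rate='brst', species='i',
                     output='energy theta phi pa gyro moments')
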
diff --git a/pyspedas/mms/tests/neutral_sheet.py b/pyspedas/mms/tests/neutral_sheet.py
new file mode 100644
index 00000000..a4c646c1
--- /dev/null
+++ b/pyspedas/mms/tests/neutral_sheet.py
@@ -0,0 +1,62 @@
+import numpy as np
+import unittest
+import pyspedas
+from pyspedas import tkm2re
+from pyspedas.analysis.neutral_sheet import neutral_sheet
+from pytplot import get_data
+
+pyspedas.mms.mec()
+tkm2re('mms1_mec_r_gsm')
+pos_data = get_data('mms1_mec_r_gsm_re')
+
+
+class NSTests(unittest.TestCase):
+ def test_lopez(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='lopez', sc2NS=True)
+ self.assertTrue(isinstance(model, np.ndarray))
+ model = neutral_sheet(pos_data.times, pos_data.y, model='lopez')
+ self.assertTrue(isinstance(model, np.ndarray))
+
+ def test_sm(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='sm', sc2NS=True)
+ self.assertTrue(isinstance(model, np.ndarray))
+ model = neutral_sheet(pos_data.times, pos_data.y, model='sm')
+ self.assertTrue(isinstance(model, np.ndarray))
+
+ def test_themis(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='themis', sc2NS=True)
+ self.assertTrue(isinstance(model, np.ndarray))
+ model = neutral_sheet(pos_data.times, pos_data.y, model='themis')
+ self.assertTrue(isinstance(model, np.ndarray))
+
+ def test_aen(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='aen', sc2NS=True)
+ self.assertTrue(isinstance(model, np.ndarray))
+ model = neutral_sheet(pos_data.times, pos_data.y, model='aen')
+ self.assertTrue(isinstance(model, np.ndarray))
+
+ def test_den(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='den', sc2NS=True)
+ self.assertTrue(isinstance(model, np.ndarray))
+ model = neutral_sheet(pos_data.times, pos_data.y, model='den')
+ self.assertTrue(isinstance(model, np.ndarray))
+
+ def test_fairfield(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='fairfield', sc2NS=True)
+ self.assertTrue(isinstance(model, np.ndarray))
+ model = neutral_sheet(pos_data.times, pos_data.y, model='fairfield')
+ self.assertTrue(isinstance(model, np.ndarray))
+
+ def test_den_fairfield(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='den_fairfield', sc2NS=True)
+ self.assertTrue(isinstance(model, np.ndarray))
+ model = neutral_sheet(pos_data.times, pos_data.y, model='den_fairfield')
+ self.assertTrue(isinstance(model, np.ndarray))
+
+ def test_invalid_model(self):
+ model = neutral_sheet(pos_data.times, pos_data.y, model='ff', sc2NS=True)
+ self.assertTrue(not isinstance(model, np.ndarray))
+
+
+if __name__ == '__main__':
+ unittest.main()
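
The neutral sheet models are all invoked the same way; a minimal sketch of the setup this module performs at import time plus one model call:

    import pyspedas
    from pyspedas import tkm2re
    from pyspedas.analysis.neutral_sheet import neutral_sheet
    from pytplot import get_data
    pyspedas.mms.mec()
    tkm2re('mms1_mec_r_gsm')  # convert positions from km to Re
    pos = get_data('mms1_mec_r_gsm_re')
    # sc2NS=True returns the spacecraft-to-neutral-sheet distance
    z_ns = neutral_sheet(pos.times, pos.y, model='themis', sc2NS=True)
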
diff --git a/pyspedas/mms/tests/orbit_plots.py b/pyspedas/mms/tests/orbit_plots.py
new file mode 100644
index 00000000..25fc581c
--- /dev/null
+++ b/pyspedas/mms/tests/orbit_plots.py
@@ -0,0 +1,194 @@
+import unittest
+import os
+from pyspedas.mms.mms_orbit_plot import mms_orbit_plot
+
+
+class TestMMSOrbitPlot(unittest.TestCase):
+ def test_trange(self):
+ # Set trange and save_png options
+ trange = ['2015-10-16/00:00:00', '2015-10-16/12:00:00']
+ save_png = 'test_trange'
+
+ mms_orbit_plot(trange=trange, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_probes(self):
+ # Set probes and save_png options
+ probes = [1, 2]
+ save_png = 'test_probes'
+
+ mms_orbit_plot(probes=probes, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_data_rate(self):
+ # Set data_rate and save_png options
+ data_rate = 'brst'
+ save_png = 'test_data_rate'
+
+ mms_orbit_plot(probes=[1, 4], trange=['2019-05-01/02:00', '2019-05-01/02:20'], data_rate=data_rate, save_png=save_png, display=False, earth=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_plane(self):
+ # Set plane and save_png options
+ plane = 'yz'
+ save_png = 'test_plane'
+
+ mms_orbit_plot(plane=plane, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_xr(self):
+ # Set xr and save_png options
+ xr = [-5, 5]
+ save_png = 'test_xr'
+
+ mms_orbit_plot(xr=xr, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_yr(self):
+ # Set yr and save_png options
+ yr = [-5, 5]
+ save_png = 'test_yr'
+
+ mms_orbit_plot(yr=yr, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_coord(self):
+ # Set coord and save_png options
+ coord = 'gsm'
+ save_png = 'test_coord'
+
+ mms_orbit_plot(coord=coord, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_xsize(self):
+ # Set xsize and save_png options
+ xsize = 10
+ save_png = 'test_xsize'
+
+ mms_orbit_plot(xsize=xsize, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_ysize(self):
+ # Set ysize and save_png options
+ ysize = 10
+ save_png = 'test_ysize'
+
+ mms_orbit_plot(ysize=ysize, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_marker(self):
+ # Set marker and save_png options
+ marker = 'o'
+ save_png = 'test_marker'
+
+ mms_orbit_plot(marker=marker, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_markevery(self):
+ # Set markevery and save_png options
+ markevery = 5
+ save_png = 'test_markevery'
+
+ mms_orbit_plot(markevery=markevery, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_markersize(self):
+ # Set markersize and save_png options
+ markersize = 10
+ save_png = 'test_markersize'
+
+ mms_orbit_plot(markersize=markersize, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_earth(self):
+ # Set earth and save_png options
+ earth = False
+ save_png = 'test_earth'
+
+ mms_orbit_plot(earth=earth, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_dpi(self):
+ # Set dpi and save_png options
+ dpi = 100
+ save_png = 'test_dpi'
+
+ mms_orbit_plot(dpi=dpi, save_png=save_png, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_png + '.png'))
+
+ def test_save_pdf(self):
+ # Set save_pdf option
+ save_pdf = 'test_save_pdf'
+
+ mms_orbit_plot(save_pdf=save_pdf, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_pdf + '.pdf'))
+
+ def test_save_eps(self):
+ # Set save_eps option
+ save_eps = 'test_save_eps'
+
+ mms_orbit_plot(save_eps=save_eps, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_eps + '.eps'))
+
+ def test_save_jpeg(self):
+ # Set save_jpeg option
+ save_jpeg = 'test_save_jpeg'
+
+ mms_orbit_plot(save_jpeg=save_jpeg, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_jpeg + '.jpeg'))
+
+ def test_save_svg(self):
+ # Set save_svg option
+ save_svg = 'test_save_svg'
+
+ mms_orbit_plot(save_svg=save_svg, display=False)
+
+ # Check that the test figure was saved
+ self.assertTrue(os.path.exists(save_svg + '.svg'))
+
+ def test_return_plot_objects(self):
+ # Set return_plot_objects option
+ return_plot_objects = True
+
+ plot_objects = mms_orbit_plot(return_plot_objects=return_plot_objects, display=False)
+
+ # Check that plot_objects is a tuple
+ self.assertIsInstance(plot_objects, tuple)
+
+
+if __name__ == '__main__':
+ unittest.main()
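
The new tests double as usage documentation; a representative call combining several of the options exercised above (all keywords taken from the tests):

    from pyspedas.mms.mms_orbit_plot import mms_orbit_plot

    # plot the MMS1 and MMS4 orbits in the GSM XY plane and save to disk
    mms_orbit_plot(trange=['2015-10-16', '2015-10-17'], probes=[1, 4],
                   plane='xy', coord='gsm', xr=[-15, 15], yr=[-15, 15],
                   save_png='mms_orbit', display=False)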
diff --git a/pyspedas/mms/tests/ql_l1b_sitl_tests.py b/pyspedas/mms/tests/ql_l1b_sitl_tests.py
index 436b4786..7f1deb89 100644
--- a/pyspedas/mms/tests/ql_l1b_sitl_tests.py
+++ b/pyspedas/mms/tests/ql_l1b_sitl_tests.py
@@ -1,8 +1,9 @@
import unittest
-import numpy as np
-
import pyspedas
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
+from pyspedas.mms.hpca.mms_hpca_calc_anodes import mms_hpca_calc_anodes
+from pyspedas.mms.hpca.mms_hpca_spin_sum import mms_hpca_spin_sum
+
class LoadTestCases(unittest.TestCase):
def test_load_fgm_ql(self):
@@ -26,6 +27,11 @@ def test_load_scm_l1a(self):
data = pyspedas.mms.scm(probe=1, level='l1a', trange=['2015-12-15', '2015-12-16'])
self.assertTrue(data_exists('mms1_scm_acb_scm123_scsrvy_srvy_l1a'))
+ def test_load_scm_l1a_brst(self):
+ data = pyspedas.mms.scm(data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:10'], level='l1a')
+ self.assertTrue(data_exists('mms1_scm_acb_scm123_scb_brst_l1a'))
+ self.assertTrue(data_exists('mms1_scm_acb_scm123_schb_brst_l1a'))
+
def test_load_scm_l1b(self):
data = pyspedas.mms.scm(probe=4, level='l1b', trange=['2015-12-15', '2015-12-16'])
self.assertTrue(data_exists('mms4_scm_acb_scm123_scsrvy_srvy_l1b'))
@@ -49,5 +55,24 @@ def test_load_fpi_ql(self):
self.assertTrue(data_exists('mms1_des_energyspectr_omni_fast'))
self.assertTrue(data_exists('mms1_des_energyspectr_py_fast'))
+ def test_load_hpca_l1b(self):
+ trange = ['2015-10-16/13:06', '2015-10-16/13:07']
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst')
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst', datatype='ion')
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst', datatype='combined')
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst', datatype='rf_corr')
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst', datatype='count_rate')
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst', datatype='flux')
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst', datatype='vel_dist')
+ data = pyspedas.mms.hpca(probe=1, level='l1b', trange=trange, data_rate='brst', datatype='bkgd_corr')
+ mms_hpca_calc_anodes(fov=[0, 360])
+ self.assertTrue(data_exists('mms1_hpca_hplus_number_density'))
+ self.assertTrue(data_exists('mms1_hpca_heplus_number_density'))
+ self.assertTrue(data_exists('mms1_hpca_heplusplus_number_density'))
+ self.assertTrue(data_exists('mms1_hpca_oplus_number_density'))
+ self.assertTrue(data_exists('mms1_hpca_heplusplus_RF_corrected'))
+ self.assertTrue(data_exists('mms1_hpca_oplusplus_count_rate_elev_0-360'))
+
+
if __name__ == '__main__':
unittest.main()
\ No newline at end of file
diff --git a/pyspedas/mms/tests/setup_tests.py b/pyspedas/mms/tests/setup_tests.py
index d71913bb..e0be1db7 100644
--- a/pyspedas/mms/tests/setup_tests.py
+++ b/pyspedas/mms/tests/setup_tests.py
@@ -1,4 +1,6 @@
import os
import pickle
-pickle.dump({'user': '', 'passwd': ''}, open(os.sep.join([os.path.expanduser('~'), 'mms_auth_info.pkl']), 'wb'))
+MMS_AUTH_U = os.getenv('MMS_AUTH_U')
+MMS_AUTH_P = os.getenv('MMS_AUTH_P')
+pickle.dump({'user': MMS_AUTH_U, 'passwd': MMS_AUTH_P}, open(os.sep.join([os.path.expanduser('~'), 'mms_auth_info.pkl']), 'wb'))
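
With this change the pickled credentials come from the MMS_AUTH_U/MMS_AUTH_P repository secrets exported in the workflow; a local equivalent, with placeholder values for illustration only:

    import os

    # normally provided by the CI secrets; set manually for a local run
    os.environ['MMS_AUTH_U'] = 'username'
    os.environ['MMS_AUTH_P'] = 'password'

    # the module writes ~/mms_auth_info.pkl at import time
    import pyspedas.mms.tests.setup_tests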
diff --git a/pyspedas/mms/tests/slice2d.py b/pyspedas/mms/tests/slice2d.py
index efb7045b..b7d75b63 100644
--- a/pyspedas/mms/tests/slice2d.py
+++ b/pyspedas/mms/tests/slice2d.py
@@ -1,16 +1,132 @@
+import numpy as np
import unittest
from pyspedas.mms.particles.mms_part_slice2d import mms_part_slice2d
+from pyspedas.particles.spd_units_string import spd_units_string
class SliceTests(unittest.TestCase):
+ def test_notime(self):
+ nothing = mms_part_slice2d()
+ self.assertTrue(not nothing)
+
+ def test_fpi_nospecies(self):
+ mms_part_slice2d(time='2015-10-16/13:06:30', probe='1', rotation='xy', display=False)
+ mms_part_slice2d(time='2015-10-16/13:06:30', probe='1', instrument='hpca', rotation='xy', display=False)
+
+ def test_fpi_return_slice(self):
+ the_slice = mms_part_slice2d(time='2015-10-16/13:06:30', probe='1', rotation='xy', display=False, return_slice=True)
+ self.assertTrue(isinstance(the_slice, dict))
+
+ def test_fpi_brst_slice_x(self):
+ mms_part_slice2d(time='2015-10-16/13:06:30', slice_x=np.array([1, 0, 0]), probe='1', species='i', data_rate='brst', rotation='bv', save_png='test_fpi_brst_i_bv_slice_x', display=False)
+
+ def test_fpi_brst_slice_norm(self):
+ mms_part_slice2d(time='2015-10-16/13:06:30', slice_norm=np.array([0, 0, 1]), probe='1', species='i', data_rate='brst', rotation='bv', save_png='test_fpi_brst_i_bv_slice_norm', display=False)
+
def test_fpi_brst_rotations(self):
time = '2015-10-16/13:06:30'
- rotations = ['xy', 'xz', 'bv', 'be', 'xvel', 'perp', 'perp_xy', 'perp_xz', 'perp_yz', 'b_exb', 'perp1-perp2']
- species = ['i', 'e']
+ # rotations = ['xy', 'xz', 'bv', 'be', 'xvel', 'perp', 'perp_xy', 'perp_xz', 'perp_yz', 'b_exb', 'perp1-perp2']
+ rotations = ['bv']
+ species = ['i']
for spc in species:
for rotation in rotations:
mms_part_slice2d(time=time, probe='1', species=spc, data_rate='brst', rotation=rotation, save_png='test_fpi_brst_' + spc + '_' + rotation, display=False)
+ def test_fpi_subtract_bulk(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='2d', save_png='test_fpi_brst_subtract_bulk',
+ subtract_bulk=True, display=False)
+
+ def test_fpi_avg_angle(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='geometric', save_png='test_fpi_brst_avg_angle',
+ average_angle=[-45, 45], display=False)
+
+ def test_fpi_sum_angle(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='geometric', save_png='test_fpi_brst_sum_angle',
+ sum_angle=[-45, 45], display=False)
+
+ def test_fpi_energy(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='geometric', save_png='test_fpi_brst_energy',
+ energy=True, display=False)
+
+ def test_fpi_samples(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='geometric', save_png='test_fpi_brst_samples',
+ samples=3, display=False)
+
+ def test_fpi_window(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='geometric', save_png='test_fpi_brst_window',
+ window=3, display=False)
+
+ def test_fpi_window_center(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='geometric', save_png='test_fpi_brst_window_center',
+ window=3, center_time=True, display=False)
+
+ def test_fpi_custom_rotation(self):
+ rot = np.zeros((3, 3))
+ rot[:, 0] = [0.33796266, -0.082956984, 0.93749634]
+ rot[:, 1] = [0.64217210, -0.70788234, -0.29413872]
+ rot[:, 2] = [0.68803796, 0.70144189, -0.18596514]
+
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='geometric', save_png='test_fpi_brst_custom_rotation',
+ custom_rotation=rot, display=False)
+
+ def test_fpi_2d_interp(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='2d', save_png='test_fpi_brst_2d_interp', display=False)
+
+ def test_fpi_2d_interp_zdirrange(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy',
+ interpolation='2d', save_png='test_fpi_brst_zdirrange', display=False,
+ zdirrange=[0, 2500])
+
+ def test_fpi_limits(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='i', data_rate='brst', rotation='xy', erange=[0, 10000],
+ save_png='test_fpi_brst_erange', display=False)
+
+ def test_fpi_electrons(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='e', data_rate='brst', rotation='xy',
+ save_png='test_fpi_brst_electrons', display=False)
+
+ def test_hpca(self):
+ time = '2015-10-16/13:06:30'
+ mms_part_slice2d(time=time, probe='1', species='hplus', instrument='hpca', data_rate='brst', rotation='xy',
+ save_png='test_hpca_brst', display=False)
+
+ def test_hpca_trange(self):
+ trange = ['2015-10-16/13:06:20', '2015-10-16/13:06:40']
+ mms_part_slice2d(trange=trange, probe='1', species='hplus', instrument='hpca', data_rate='brst', rotation='xy',
+ save_png='test_hpca_brst_trange', display=False)
+
+ def test_units_string(self):
+ self.assertTrue(spd_units_string('counts') == 'Counts')
+ self.assertTrue(spd_units_string('rate') == 'Rate (#/sec)')
+ self.assertTrue(spd_units_string('eflux') == 'Energy Flux (eV / sec / $cm^2$ / ster / eV)')
+ self.assertTrue(spd_units_string('flux') == 'Flux (# / sec / $cm^2$ / ster / eV)')
+ self.assertTrue(spd_units_string('df') == 'f ($s^3$ / $cm^3$ / $km^3$)')
+ self.assertTrue(spd_units_string('df_cm') == 'f ($s^3$ / $cm^6$)')
+ self.assertTrue(spd_units_string('df_km') == 'f ($s^3$ / $km^6$)')
+ self.assertTrue(spd_units_string('e2flux') == '$Energy^2$ Flux ($eV^2$ / sec / $cm^2$ / ster /eV)')
+ self.assertTrue(spd_units_string('e3flux') == '$Energy^3$ Flux ($eV^3$ / sec / $cm^2$ / ster /eV)')
+
if __name__ == '__main__':
unittest.main()
diff --git a/pyspedas/mms/tests/wavpol.py b/pyspedas/mms/tests/wavpol.py
index fdb0f439..e6f24cd0 100644
--- a/pyspedas/mms/tests/wavpol.py
+++ b/pyspedas/mms/tests/wavpol.py
@@ -1,6 +1,6 @@
import unittest
import pyspedas
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
from pyspedas.cotrans.fac_matrix_make import fac_matrix_make
from pyspedas import tinterpol
from pytplot import get_data, store_data
@@ -24,7 +24,7 @@ def test_brst(self):
fac_matrix_make(mms_fgm_name, other_dim='xgse', newname=mms_fgm_name + '_fac_mat')
tinterpol('mms4_fgm_b_gse_srvy_l2_bvec_fac_mat', mms_scm_name)
fac_mat = get_data('mms4_fgm_b_gse_srvy_l2_bvec_fac_mat-itrp')
- scm_data = get_data(mms_scm_name)
+ scm_data = get_data(mms_scm_name, dt=True)
scm_fac = [fac_mat.y[idx, :, :] @ scm_data.y[idx, :] for idx in range(0, len(scm_data.y[:, 0]))]
store_data(mms_scm_name + '_fac', data={'x': scm_data.times, 'y': scm_fac})
# number of points for FFT
diff --git a/pyspedas/omni/__init__.py b/pyspedas/omni/__init__.py
index f10235d6..0a16eb5b 100644
--- a/pyspedas/omni/__init__.py
+++ b/pyspedas/omni/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def data(trange=['2013-11-5', '2013-11-6'],
datatype='1min',
@@ -68,3 +69,7 @@ def data(trange=['2013-11-5', '2013-11-6'],
get_support_data=get_support_data, get_ignore_data=get_ignore_data,varformat=varformat,
varnames=varnames, downloadonly=downloadonly, notplot=notplot,
time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='ACE', instrument='OMNI', label=label)
diff --git a/pyspedas/omni/load.py b/pyspedas/omni/load.py
index 9e6a3c50..717d3e80 100644
--- a/pyspedas/omni/load.py
+++ b/pyspedas/omni/load.py
@@ -1,9 +1,10 @@
+import logging
import warnings
import astropy
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
@@ -26,16 +27,20 @@ def load(trange=['2013-11-5', '2013-11-6'],
pyspedas.omni.data
"""
+ file_res = 24*3600.0
if 'min' in datatype:
pathformat = level + '_' + datatype + '/%Y/omni_' + level + '_' + datatype + '_%Y%m01_v??.cdf'
elif 'hour' in datatype:
pathformat = 'hourly/%Y/omni2_h0_mrg1hr_%Y%m01_v??.cdf'
+ file_res = 24*3600*183.0 # 1 file every 6 months
+ get_ignore_data = True # required to load these files
else:
- raise TypeError("%r are invalid keyword arguments" % datatype)
+ logging.error('Invalid datatype: ' + datatype)
+ return
# find the full remote path names using the trange
- remote_names = dailynames(file_format=pathformat, trange=trange)
+ remote_names = dailynames(file_format=pathformat, trange=trange, res=file_res)
out_files = []
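
The res keyword makes dailynames() step the name template in ~6-month increments instead of daily, so only one candidate name is generated per semi-annual omni2_h0_mrg1hr file; roughly:

    from pyspedas.utilities.dailynames import dailynames

    # one candidate file name per ~183-day interval overlapping the trange
    names = dailynames(file_format='hourly/%Y/omni2_h0_mrg1hr_%Y%m01_v??.cdf',
                       trange=['2013-01-15', '2013-09-01'], res=24*3600*183.0)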
diff --git a/pyspedas/omni/tests/tests.py b/pyspedas/omni/tests/tests.py
index e55e35b9..9d7cc813 100644
--- a/pyspedas/omni/tests/tests.py
+++ b/pyspedas/omni/tests/tests.py
@@ -1,17 +1,14 @@
-
import os
import unittest
-import pandas as pd
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_utc_timestamp_regression(self):
varname = 'BX_GSE'
data_omni = pyspedas.omni.data(trange=['2010-01-01/00:00:00', '2010-01-02/00:00:00'],notplot=True,varformat=varname,time_clip=True)
- date_time = pd.to_datetime(data_omni[varname]['x'],unit='s')
- self.assertTrue(str(date_time[0]) == '2010-01-01 00:00:00')
+ self.assertTrue(str(data_omni[varname]['x'][0]) == '2010-01-01 00:00:00')
def test_load_hro2_data(self):
omni_vars = pyspedas.omni.data()
@@ -40,9 +37,21 @@ def test_load_hro_5min_data(self):
self.assertTrue(data_exists('BZ_GSM'))
self.assertTrue(data_exists('proton_density'))
+ def test_load_hro_hour_data(self):
+ omni_vars = pyspedas.omni.data(level='hro2', datatype='hour', trange=['2013-03-01', '2013-03-02'])
+ self.assertTrue(data_exists('BX_GSE'))
+ self.assertTrue(data_exists('BY_GSE'))
+ self.assertTrue(data_exists('BZ_GSE'))
+ self.assertTrue(data_exists('BY_GSM'))
+ self.assertTrue(data_exists('BZ_GSM'))
+
+ def test_load_invalid_datatype(self):
+ omni_vars = pyspedas.omni.data(datatype='1')
+ self.assertTrue(omni_vars is None)
+
def test_downloadonly(self):
files = pyspedas.omni.data(downloadonly=True, trange=['2014-2-15', '2014-2-16'])
self.assertTrue(os.path.exists(files[0]))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/particles/moments/moments_3d_omega_weights.py b/pyspedas/particles/moments/moments_3d_omega_weights.py
index 3a7c4d9f..e5a41918 100644
--- a/pyspedas/particles/moments/moments_3d_omega_weights.py
+++ b/pyspedas/particles/moments/moments_3d_omega_weights.py
@@ -1,6 +1,6 @@
-
import numpy as np
+
def moments_3d_omega_weights(theta, phi, dtheta, dphi):
"""
Helper function used by moments_3d
diff --git a/pyspedas/particles/moments/spd_pgs_moments.py b/pyspedas/particles/moments/spd_pgs_moments.py
index 4104ecef..ef6e31fc 100644
--- a/pyspedas/particles/moments/spd_pgs_moments.py
+++ b/pyspedas/particles/moments/spd_pgs_moments.py
@@ -1,6 +1,7 @@
from pyspedas.particles.moments.moments_3d import moments_3d
+
def spd_pgs_moments(data_in, sc_pot=0):
"""
@@ -19,4 +20,4 @@ def spd_pgs_moments(data_in, sc_pot=0):
Dictionary containing moments
"""
- return moments_3d(data_in, sc_pot=sc_pot)
\ No newline at end of file
+ return moments_3d(data_in, sc_pot=sc_pot)
diff --git a/pyspedas/particles/moments/spd_pgs_moments_tplot.py b/pyspedas/particles/moments/spd_pgs_moments_tplot.py
index d05e96ce..2db31771 100644
--- a/pyspedas/particles/moments/spd_pgs_moments_tplot.py
+++ b/pyspedas/particles/moments/spd_pgs_moments_tplot.py
@@ -5,6 +5,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def spd_pgs_moments_tplot(moments, x=None, prefix='', suffix=''):
"""
Creates tplot variables from moments dictionaries
@@ -35,7 +36,6 @@ def spd_pgs_moments_tplot(moments, x=None, prefix='', suffix=''):
logging.error('Error, the "moments" variable must be a hash table containing the moments')
return
-
for key in moments.keys():
store_data(prefix + '_' + key + suffix, data={'x': x, 'y': moments[key]})
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_do_fac.py b/pyspedas/particles/spd_part_products/spd_pgs_do_fac.py
index fc9c9bba..226b8f6a 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_do_fac.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_do_fac.py
@@ -2,6 +2,7 @@
import numpy as np
from astropy.coordinates import spherical_to_cartesian, cartesian_to_spherical
+
def spd_pgs_do_fac(data_in, mat):
"""
Applies field aligned coordinate transformation to input data
@@ -30,4 +31,4 @@ def spd_pgs_do_fac(data_in, mat):
data_out['theta'] = sphere_data[1].value*180.0/np.pi
data_out['phi'] = sphere_data[2].value*180.0/np.pi
- return data_out
\ No newline at end of file
+ return data_out
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_limit_range.py b/pyspedas/particles/spd_part_products/spd_pgs_limit_range.py
index 3b92ab33..d40109a1 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_limit_range.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_limit_range.py
@@ -67,4 +67,4 @@ def spd_pgs_limit_range(data, phi=None, theta=None, energy=None):
data['bins'][data['energy'] < energy[0]] = 0
data['bins'][data['energy'] > energy[1]] = 0
- return data
\ No newline at end of file
+ return data
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_make_e_spec.py b/pyspedas/particles/spd_part_products/spd_pgs_make_e_spec.py
index 400e45d2..cd75d9e2 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_make_e_spec.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_make_e_spec.py
@@ -8,6 +8,7 @@
except ImportError:
nanmean = np.nanmean
+
def spd_pgs_make_e_spec(data_in):
"""
Builds energy spectrogram from the particle data structure
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_make_phi_spec.py b/pyspedas/particles/spd_part_products/spd_pgs_make_phi_spec.py
index 8f7ce2c5..00a9785f 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_make_phi_spec.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_make_phi_spec.py
@@ -10,6 +10,7 @@
except ImportError:
nansum = np.nansum
+
def spd_pgs_make_phi_spec(data_in, resolution=None):
"""
Builds phi (longitudinal) spectrogram from the particle data structure
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_make_theta_spec.py b/pyspedas/particles/spd_part_products/spd_pgs_make_theta_spec.py
index f29c8a4e..3b5f8b98 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_make_theta_spec.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_make_theta_spec.py
@@ -10,6 +10,7 @@
except ImportError:
nansum = np.nansum
+
def spd_pgs_make_theta_spec(data_in, resolution=None, colatitude=False):
"""
Builds theta (latitudinal) spectrogram from simplified particle data structure.
@@ -102,5 +103,3 @@ def spd_pgs_make_theta_spec(data_in, resolution=None, colatitude=False):
y = y[1:]
return (y, ave)
-
-
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_make_tplot.py b/pyspedas/particles/spd_part_products/spd_pgs_make_tplot.py
index 37b1f839..fe7b598b 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_make_tplot.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_make_tplot.py
@@ -1,9 +1,10 @@
-
+import logging
import numpy as np
from pytplot import store_data, options
from pyspedas.particles.spd_units_string import spd_units_string
+
def spd_pgs_make_tplot(name, x=None, y=None, z=None, units='', ylog=False, zlog=True, colorbar='spedas', ytitle=None, ysubtitle=''):
"""
Create tplot variable with standard spectrogram settings
@@ -40,7 +41,7 @@ def spd_pgs_make_tplot(name, x=None, y=None, z=None, units='', ylog=False, zlog=
"""
if not isinstance(x, np.ndarray) or not isinstance(y, np.ndarray) or not isinstance(z, np.ndarray) :
- print('Error, must specify x, y and z parameters')
+ logging.error('Error, must specify x, y and z parameters')
return
if ytitle is None:
@@ -54,4 +55,4 @@ def spd_pgs_make_tplot(name, x=None, y=None, z=None, units='', ylog=False, zlog=
options(name, 'ysubtitle', ysubtitle)
options(name, 'ztitle', spd_units_string(units, units_only=True))
options(name, 'Colormap', colorbar)
- return name
\ No newline at end of file
+ return name
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_progress_update.py b/pyspedas/particles/spd_part_products/spd_pgs_progress_update.py
index b0ce2be3..b501e3b2 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_progress_update.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_progress_update.py
@@ -4,6 +4,7 @@
logging.captureWarnings(True)
logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
+
def spd_pgs_progress_update(last_update_time=None, current_sample=None, total_samples=None, type_string=None):
"""
Helper routine prints status message indicating completion percent
@@ -40,4 +41,4 @@ def spd_pgs_progress_update(last_update_time=None, current_sample=None, total_sa
logging.info(type_string + ' is ' + str(round(100.0*current_sample/total_samples)) + '% done.')
last_update_time = time()
- return last_update_time
\ No newline at end of file
+ return last_update_time
diff --git a/pyspedas/particles/spd_part_products/spd_pgs_regrid.py b/pyspedas/particles/spd_part_products/spd_pgs_regrid.py
index 41fbaa41..d2d7e96c 100644
--- a/pyspedas/particles/spd_part_products/spd_pgs_regrid.py
+++ b/pyspedas/particles/spd_part_products/spd_pgs_regrid.py
@@ -1,14 +1,15 @@
-
+import logging
import numpy as np
from scipy.interpolate import NearestNDInterpolator
from astropy.coordinates import spherical_to_cartesian
+
def spd_pgs_regrid(data, regrid_dimen):
"""
"""
if len(regrid_dimen) != 2:
- print('Invalid regrid dimensions; the dimensions should be [n_phi, n_theta]')
+ logging.error('Invalid regrid dimensions; the dimensions should be [n_phi, n_theta]')
return
n_energy = len(data['energy'][:, 0])
@@ -65,6 +66,7 @@ def spd_pgs_regrid(data, regrid_dimen):
return output
+
def griddata(phi, theta, data):
r = np.ones(len(phi))
phi_rad = phi*np.pi/180.0
@@ -72,4 +74,4 @@ def griddata(phi, theta, data):
cart_temp = spherical_to_cartesian(r, theta_rad, phi_rad)
points = np.stack(cart_temp).T
- return NearestNDInterpolator(points, data)
\ No newline at end of file
+ return NearestNDInterpolator(points, data)
diff --git a/pyspedas/particles/spd_slice2d/slice1d_plot.py b/pyspedas/particles/spd_slice2d/slice1d_plot.py
index 50eb9d2e..d2ffc4b7 100644
--- a/pyspedas/particles/spd_slice2d/slice1d_plot.py
+++ b/pyspedas/particles/spd_slice2d/slice1d_plot.py
@@ -1,3 +1,4 @@
+import logging
import numpy as np
import matplotlib.pyplot as plt
from .slice2d_getinfo import slice2d_getinfo
@@ -30,7 +31,7 @@ def plot(the_slice, direction, value, xrange=None, yrange=None):
direction = direction.lower()
if direction not in ['x', 'y']:
- print('Invalid direction specified. Valid options are: x, y')
+ logging.error('Invalid direction specified. Valid options are: x, y')
return
if xrange is None:
diff --git a/pyspedas/particles/spd_slice2d/slice2d.py b/pyspedas/particles/spd_slice2d/slice2d.py
index 43cb431b..69cc7dd2 100644
--- a/pyspedas/particles/spd_slice2d/slice2d.py
+++ b/pyspedas/particles/spd_slice2d/slice2d.py
@@ -1,3 +1,4 @@
+import logging
import numpy as np
from .slice2d_intrange import slice2d_intrange
@@ -157,7 +158,7 @@ def slice2d(dists,
if trange is None:
if time is None:
- print('Please specify a time or time range over which to compute the slice.')
+ logging.error('Please specify a time or time range over which to compute the slice.')
return
if window is None and samples is None:
# use single closest distribution by default
@@ -167,7 +168,7 @@ def slice2d(dists,
'perp_xy', 'perp_xz', 'perp_yz', 'b_exb', 'perp1-perp2']
if rotation not in valid_rotations:
- print('Invalid rotation requested; valid options: ' + ', '.join(valid_rotations))
+ logging.error('Invalid rotation requested; valid options: ' + ', '.join(valid_rotations))
return
if interpolation == '2d':
@@ -181,7 +182,7 @@ def slice2d(dists,
if resolution is None:
resolution = 500
else:
- print('Unknown interpolation method: ' + interpolation + '; valid options: "geometric", "2d"')
+ logging.error('Unknown interpolation method: ' + interpolation + '; valid options: "geometric", "2d"')
return
if time is not None:
@@ -288,7 +289,7 @@ def slice2d(dists,
geo_shift = [0, 0, 0]
if subtract_bulk:
- vectors = slice2d_subtract(rot_matrix['vectors'], vbulk)
+ vectors = slice2d_subtract(rot_matrix['vectors'], rot_matrix['vbulk'])
if vectors is not None:
rot_matrix['vectors'] = vectors
@@ -338,5 +339,5 @@ def slice2d(dists,
'n_samples': len(times_ind),
**the_slice}
- print('Finished slice at ' + time_string(tr[0], fmt='%Y-%m-%d %H:%M:%S.%f'))
+ logging.info('Finished slice at ' + time_string(tr[0], fmt='%Y-%m-%d %H:%M:%S.%f'))
return out
diff --git a/pyspedas/particles/spd_slice2d/slice2d_collate.py b/pyspedas/particles/spd_slice2d/slice2d_collate.py
index ecf34902..b678bfed 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_collate.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_collate.py
@@ -1,3 +1,4 @@
+import logging
import warnings
from copy import deepcopy
import numpy as np
@@ -40,7 +41,7 @@ def slice2d_collate(data, weight, sphere, previous_out=None, sum_samples=False):
dp_in = dp_in[valid]
dt_in = dt_in[valid]
else:
- print('No valid data in distribution(s).')
+ logging.error('No valid data in distribution(s).')
return
if previous_out is None:
@@ -68,4 +69,3 @@ def slice2d_collate(data, weight, sphere, previous_out=None, sum_samples=False):
'dp': dp_out,
'dt': dt_out
}
-
diff --git a/pyspedas/particles/spd_slice2d/slice2d_custom_rotation.py b/pyspedas/particles/spd_slice2d/slice2d_custom_rotation.py
index 0b2b8110..55d2b68a 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_custom_rotation.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_custom_rotation.py
@@ -1,3 +1,4 @@
+import logging
import numpy as np
from .slice2d_get_support import slice2d_get_support
@@ -23,7 +24,7 @@ def slice2d_custom_rotation(custom_rotation=None,
matrix = slice2d_get_support(custom_rotation, trange, matrix=True)
- print('Applying custom rotation')
+ logging.info('Applying custom rotation')
# Transform particle and support vectors
if vectors is not None:
diff --git a/pyspedas/particles/spd_slice2d/slice2d_geo.py b/pyspedas/particles/spd_slice2d/slice2d_geo.py
index 461d2cea..462fd941 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_geo.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_geo.py
@@ -1,8 +1,10 @@
+import logging
from copy import deepcopy
from time import time
import numpy as np
from pyspedas.particles.spd_slice2d.quaternions import qtom, qcompose
+
def slice2d_geo(data, resolution, r, phi, theta, dr, dp, dt, orient_matrix=None, rotation_matrix=None,
custom_matrix=None, msg_prefix='', shift=None, average_angle=None, sum_angle=None):
"""
@@ -150,7 +152,7 @@ def slice2d_geo(data, resolution, r, phi, theta, dr, dp, dt, orient_matrix=None,
else:
num_angles = na
msg = msg_prefix + str(int(100*((j+1)*num_points + i)/(num_angles*num_points))) + '% complete'
- print(msg)
+ logging.info(msg)
previous_time = time()
# average areas where bins overlapped
diff --git a/pyspedas/particles/spd_slice2d/slice2d_get_data.py b/pyspedas/particles/spd_slice2d/slice2d_get_data.py
index 5416c369..16da6f36 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_get_data.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_get_data.py
@@ -1,3 +1,4 @@
+import logging
from copy import deepcopy
import numpy as np
@@ -45,7 +46,7 @@ def slice2d_get_data(dists, trange=None, energy=False, erange=None):
times_idx = slice2d_intrange(dists, trange)
if len(times_idx) == 0:
- print('No data in the time range')
+ logging.error('No data in the time range')
return
weight = np.zeros(dists[0]['bins'][:, :, :].shape)
diff --git a/pyspedas/particles/spd_slice2d/slice2d_plot.py b/pyspedas/particles/spd_slice2d/slice2d_plot.py
index 1a6ab693..f5cc8f15 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_plot.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_plot.py
@@ -10,10 +10,11 @@ def plot(the_slice,
xrange=None,
yrange=None,
zrange=None,
- colormap='spedas',
+ colormap=None,
olines=8,
contours=False,
plotsize=10,
+ title=None,
save_png=None,
save_jpeg=None,
save_svg=None,
@@ -36,6 +37,14 @@ def plot(the_slice,
else:
spec_options['norm'] = mpl.colors.LogNorm(vmin=zrange[0], vmax=zrange[1])
+ style = pytplot.tplot_opt_glob.get('style')
+
+ if style is None:
+ if colormap is None:
+ colormap = 'spedas'
+ else:
+ plt.style.use(style)
+
if colormap == 'spedas':
_colors = pytplot.spedas_colorbar
spd_map = [(np.array([r, g, b])).astype(np.float64) / 256 for r, g, b in zip(_colors.r, _colors.g, _colors.b)]
@@ -45,6 +54,10 @@ def plot(the_slice,
spec_options['cmap'] = cmap
+ char_size = pytplot.tplot_opt_glob.get('charsize')
+ if char_size is None:
+ char_size = 12
+
fig, axes = plt.subplots()
fig.set_size_inches(plotsize, plotsize)
@@ -54,11 +67,17 @@ def plot(the_slice,
if yrange is not None:
axes.set_ylim(yrange)
- info = slice2d_getinfo(the_slice)
+ axis_font_size = pytplot.tplot_opt_glob.get('axis_font_size')
- axes.set_title(info['title'])
- axes.set_ylabel(info['ytitle'])
- axes.set_xlabel(info['xtitle'])
+ if axis_font_size is not None:
+ axes.tick_params(axis='x', labelsize=axis_font_size)
+ axes.tick_params(axis='y', labelsize=axis_font_size)
+
+ info = slice2d_getinfo(the_slice, title=title)
+
+ axes.set_title(info['title'], fontsize=char_size)
+ axes.set_ylabel(info['ytitle'], fontsize=char_size)
+ axes.set_xlabel(info['xtitle'], fontsize=char_size)
fig.subplots_adjust(left=0.14, right=0.86, top=0.86, bottom=0.14)
@@ -66,11 +85,14 @@ def plot(the_slice,
pad, width = 0.02, 0.01
cax = fig.add_axes([box.xmax + pad, box.ymin, width, box.height])
+ if axis_font_size is not None:
+ cax.tick_params(labelsize=axis_font_size)
+
im = axes.pcolormesh(the_slice['xgrid'], the_slice['ygrid'], the_slice['data'].T, **spec_options)
colorbar = fig.colorbar(im, cax=cax)
- colorbar.set_label(info['ztitle'])
+ colorbar.set_label(info['ztitle'], fontsize=char_size)
# draw lines at the origin
axes.axvline(x=0, linestyle=(0, (5, 10)), color='black')
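
The slice plots now honor the global pytplot options; setting them directly on tplot_opt_glob (exactly the dictionary the code above reads) looks like this — whether a tplot_options() wrapper also accepts these keys is not shown in the patch:

    import pytplot

    pytplot.tplot_opt_glob['style'] = 'dark_background'  # any matplotlib style sheet name
    pytplot.tplot_opt_glob['charsize'] = 14              # title/label font size
    pytplot.tplot_opt_glob['axis_font_size'] = 10        # tick-label font size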
diff --git a/pyspedas/particles/spd_slice2d/slice2d_rotate.py b/pyspedas/particles/spd_slice2d/slice2d_rotate.py
index 2898e367..2bc72489 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_rotate.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_rotate.py
@@ -1,3 +1,4 @@
+import logging
import numpy as np
from .spd_cal_rot import spd_cal_rot
@@ -12,11 +13,11 @@ def slice2d_rotate(rotation=None, vectors=None, bfield=None, vbulk=None, sunvec=
req_vbulk = ['bv', 'be', 'xvel', 'perp', 'perp2', 'b_exb', 'perp1-perp2']
if bfield is None and rotation in req_bfield:
- print('Rotation: ' + rotation + ' requires B-field data')
+ logging.error('Rotation: ' + rotation + ' requires B-field data')
return
if vbulk is None and rotation in req_vbulk:
- print('Rotation: ' + rotation + ' requires bulk velocity data')
+ logging.error('Rotation: ' + rotation + ' requires bulk velocity data')
return
if rotation == 'bv':
@@ -48,11 +49,11 @@ def slice2d_rotate(rotation=None, vectors=None, bfield=None, vbulk=None, sunvec=
# [B, (BxV)xB] (this is the parallel - perp 2 plane)
matrix = spd_cal_rot(bfield, np.cross(np.cross(bfield, vbulk), bfield))
else:
- print('Unknown rotation: ' + rotation)
+ logging.error('Unknown rotation: ' + rotation)
return
if rotation != 'xy':
- print('Aligning slice plane to: ' + rotation)
+ logging.info('Aligning slice plane to: ' + rotation)
# Transform particle and support vectors
if vectors is not None:
@@ -62,10 +63,10 @@ def slice2d_rotate(rotation=None, vectors=None, bfield=None, vbulk=None, sunvec=
transformed[vector_idx] = (matrix.T @ vectors[vector_idx, :]).flatten()
vectors = transformed
if vbulk is not None:
- vbulk = matrix @ vbulk
+ vbulk = matrix.T @ vbulk
if bfield is not None:
- bfield = matrix @ bfield
+ bfield = matrix.T @ bfield
if sunvec is not None:
- sunvec = matrix @ sunvec
+ sunvec = matrix.T @ sunvec
return {'matrix': matrix, 'vectors': vectors, 'bfield': bfield, 'vbulk': vbulk, 'sunvec': sunvec}
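
The switch to matrix.T makes the support-vector transform consistent with the particle vectors: under the convention assumed here (columns of the rotation matrix are the new basis axes expressed in the old frame), components transform with the transpose. A self-contained numpy check:

    import numpy as np

    # columns are the new x, y, z axes written in old-frame coordinates
    basis = np.array([[0., 1., 0.],
                      [0., 0., 1.],
                      [1., 0., 0.]])
    v_old = np.array([1., 2., 3.])
    v_new = basis.T @ v_old  # -> [3., 1., 2.]: old z becomes new x, etc.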
diff --git a/pyspedas/particles/spd_slice2d/slice2d_smooth.py b/pyspedas/particles/spd_slice2d/slice2d_smooth.py
index 5cd018e3..1f3cd932 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_smooth.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_smooth.py
@@ -1,3 +1,4 @@
+import logging
import numpy as np
import scipy.signal as signal
@@ -42,6 +43,6 @@ def slice2d_smooth(the_slice, width):
the_slice['data'] = signal.convolve2d(the_slice['data'], kernel, mode='same')
else:
- print('Smoothing not applied. Smoothing value must be >= 2')
+ logging.error('Smoothing not applied. Smoothing value must be >= 2')
return the_slice
diff --git a/pyspedas/particles/spd_slice2d/slice2d_subtract.py b/pyspedas/particles/spd_slice2d/slice2d_subtract.py
index b8e5fe04..475a29ce 100644
--- a/pyspedas/particles/spd_slice2d/slice2d_subtract.py
+++ b/pyspedas/particles/spd_slice2d/slice2d_subtract.py
@@ -1,3 +1,4 @@
+import logging
import numpy as np
@@ -10,11 +11,11 @@ def slice2d_subtract(vectors=None, velocity=None):
return
if vectors.shape[1] != 3 or len(velocity) != 3:
- print('Invalid vector dimensions, cannot subtract velocity')
+ logging.error('Invalid vector dimensions, cannot subtract velocity')
return
if np.sum(~np.isfinite((velocity))):
- print('Invalid bulk velocity data, cannot subtract velocity')
+ logging.error('Invalid bulk velocity data, cannot subtract velocity')
return
if not isinstance(velocity, np.ndarray):
diff --git a/pyspedas/particles/spd_slice2d/tplot_average.py b/pyspedas/particles/spd_slice2d/tplot_average.py
index 154cf481..48375564 100644
--- a/pyspedas/particles/spd_slice2d/tplot_average.py
+++ b/pyspedas/particles/spd_slice2d/tplot_average.py
@@ -1,9 +1,10 @@
+import logging
import numpy as np
from pytplot import get_data
-from pyspedas import time_double
+from pyspedas import time_string
-def tplot_average(tvar, trange):
+def tplot_average(tvar, trange, quiet=False):
"""
Returns the average value of a tplot variable over a specified time range.
@@ -19,22 +20,23 @@ def tplot_average(tvar, trange):
-------
Average value of the tplot variable
"""
- data = get_data(tvar)
+ data = get_data(tvar, dt=True)
if data is None:
- print('Error reading: ' + tvar)
+ logging.error('Error reading: ' + tvar)
return
if len(trange) != 2:
- print('Error: time range must be two element array.')
+ logging.error('Error: time range must be two element array.')
return
- trange = time_double(trange)
+ trange = time_string(trange)
- t0 = np.min(trange)
- t1 = np.max(trange)
+ t0 = np.datetime64(min(trange))
+ t1 = np.datetime64(max(trange))
- print('Averaging ' + tvar)
+ if not quiet:
+ logging.info('Averaging ' + tvar)
# find the data within the time range
indices = np.argwhere((data.times <= t1) & (data.times >= t0))
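
Usage sketch for the reworked tplot_average (the FGM variable name is illustrative; any loaded tplot variable works):

    import pyspedas
    from pyspedas.particles.spd_slice2d.tplot_average import tplot_average

    pyspedas.mms.fgm(trange=['2015-10-16', '2015-10-17'])
    avg = tplot_average('mms1_fgm_b_gse_srvy_l2_btot',
                        ['2015-10-16/13:00', '2015-10-16/14:00'])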
diff --git a/pyspedas/particles/spd_units_string.py b/pyspedas/particles/spd_units_string.py
index 3a7887be..b9643e79 100644
--- a/pyspedas/particles/spd_units_string.py
+++ b/pyspedas/particles/spd_units_string.py
@@ -35,4 +35,4 @@ def spd_units_string(units, units_only=False):
if units_only:
return out[1]
- return ''.join(out)
\ No newline at end of file
+ return ''.join(out)
diff --git a/pyspedas/poes/__init__.py b/pyspedas/poes/__init__.py
index 90c15c63..3d914bcb 100644
--- a/pyspedas/poes/__init__.py
+++ b/pyspedas/poes/__init__.py
@@ -1,5 +1,5 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
def sem(trange=['2018-11-5', '2018-11-6'],
@@ -66,3 +66,7 @@ def sem(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='sem', probe=probe, trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='POES', instrument='sem2', label=label)
diff --git a/pyspedas/poes/load.py b/pyspedas/poes/load.py
index 4bd3162c..e6f865cb 100644
--- a/pyspedas/poes/load.py
+++ b/pyspedas/poes/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/poes/tests/tests.py b/pyspedas/poes/tests/tests.py
index b9d88702..77bbb82d 100644
--- a/pyspedas/poes/tests/tests.py
+++ b/pyspedas/poes/tests/tests.py
@@ -1,11 +1,15 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
from pytplot import get_data
import pyspedas
+
class LoadTestCases(unittest.TestCase):
+ def test_load_notplot(self):
+ sem_vars = pyspedas.poes.sem(notplot=True)
+ self.assertTrue('ted_ele_tel0_low_eflux' in sem_vars)
+
def test_load_sem_data(self):
sem_vars = pyspedas.poes.sem(time_clip=True)
self.assertTrue(data_exists('ted_ele_tel0_low_eflux'))
@@ -23,5 +27,6 @@ def test_downloadonly(self):
files = pyspedas.poes.sem(downloadonly=True, probe='noaa19')
self.assertTrue(os.path.exists(files[0]))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/polar/__init__.py b/pyspedas/polar/__init__.py
index 5a1d417d..d4695a68 100644
--- a/pyspedas/polar/__init__.py
+++ b/pyspedas/polar/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def mfe(trange=['2003-10-28', '2003-10-29'],
datatype='k0',
@@ -720,3 +721,7 @@ def orbit(trange=['2003-10-28', '2003-10-29'],
"""
return load(instrument='spha', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Polar', instrument=instrument, label=label)
diff --git a/pyspedas/polar/load.py b/pyspedas/polar/load.py
index 897e010f..a28a5455 100644
--- a/pyspedas/polar/load.py
+++ b/pyspedas/polar/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/polar/tests/tests.py b/pyspedas/polar/tests/tests.py
index c7e8ca74..afa807c0 100644
--- a/pyspedas/polar/tests/tests.py
+++ b/pyspedas/polar/tests/tests.py
@@ -1,10 +1,9 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_downloadonly(self):
files = pyspedas.polar.efi(downloadonly=True)
@@ -53,10 +52,15 @@ def test_load_pixie_data(self):
pixie_vars = pyspedas.polar.pixie()
self.assertTrue(data_exists('TXF_HIGH'))
+ def test_load_vis_data(self):
+ vis_vars = pyspedas.polar.vis(notplot=True)
+ self.assertTrue('Image_Counts' in vis_vars)
+
def test_load_orbit_data(self):
orbit_vars = pyspedas.polar.orbit()
self.assertTrue(data_exists('AVG_SPIN_RATE'))
self.assertTrue(data_exists('SPIN_PHASE'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/psp/__init__.py b/pyspedas/psp/__init__.py
index ed418f5f..7909a4dc 100644
--- a/pyspedas/psp/__init__.py
+++ b/pyspedas/psp/__init__.py
@@ -17,7 +17,8 @@ def fields(trange=['2018-11-5', '2018-11-6'],
no_update=False,
time_clip=False,
username=None,
- password=None
+ password=None,
+ last_version=False
):
"""
This function loads Parker Solar Probe FIELDS data
@@ -100,6 +101,8 @@ def fields(trange=['2018-11-5', '2018-11-6'],
password: str
Password to use for authentication
+ last_version: bool
+ If True, only download the highest-numbered file version
Returns
----------
List of tplot variables created.
@@ -135,12 +138,14 @@ def fields(trange=['2018-11-5', '2018-11-6'],
instrument='fields', trange=trange, datatype=datatype, spec_types=spec_types, level=level,
suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames,
downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update,
- username=username, password=password
+ username=username, password=password, last_version=last_version
)
if loaded_vars is None or notplot or downloadonly:
return loaded_vars
+ qf_root = 'psp_fld_l2_quality_flags'+suffix
+
# If variables are loaded that quality flag filtering supports --
# Make sure the quality flag variable is also loaded and linked.
mag_rtnvars = [x for x in loaded_vars if 'fld_l2_mag_RTN' in x ]
@@ -150,9 +155,9 @@ def fields(trange=['2018-11-5', '2018-11-6'],
& ('psp_fld_l2_quality_flags'+suffix not in loaded_vars):
loaded_extra = load(
instrument='fields', trange=trange, datatype=datatype, spec_types=spec_types, level=level,
- suffix=suffix, get_support_data=True, varformat=varformat, varnames=['psp_fld_l2_quality_flags'],
+ suffix=suffix, get_support_data=True, varformat=varformat, varnames=['psp_fld_l2_quality_flags'],
downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update,
- username=username, password=password
+ username=username, password=password, last_version=last_version
)
qf_root = 'psp_fld_l2_quality_flags'+suffix if 'psp_fld_l2_quality_flags'+suffix in loaded_extra else None
loaded_vars += loaded_extra
@@ -183,7 +188,8 @@ def spc(trange=['2018-11-5', '2018-11-6'],
no_update=False,
time_clip=False,
username=None,
- password=None
+ password=None,
+ last_version=False
):
"""
This function loads Parker Solar Probe Solar Probe Cup data
@@ -240,6 +246,9 @@ def spc(trange=['2018-11-5', '2018-11-6'],
password: str
Password to use for authentication
+ last_version: bool
+ If True, only download the highest-numbered file version
+
Returns
----------
List of tplot variables created.
@@ -262,7 +271,7 @@ def spc(trange=['2018-11-5', '2018-11-6'],
return load(instrument='spc', trange=trange, datatype=datatype, level=level, suffix=suffix,
get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly,
- notplot=notplot, time_clip=time_clip, no_update=no_update, username=username, password=password)
+ notplot=notplot, time_clip=time_clip, no_update=no_update, username=username, password=password, last_version=last_version)
def spe(trange=['2018-11-5', '2018-11-6'],
datatype='spa_sf1_32e',
@@ -274,7 +283,8 @@ def spe(trange=['2018-11-5', '2018-11-6'],
downloadonly=False,
notplot=False,
no_update=False,
- time_clip=False
+ time_clip=False,
+ last_version=False
):
"""
This function loads Parker Solar Probe SWEAP/SPAN-e data
@@ -326,6 +336,9 @@ def spe(trange=['2018-11-5', '2018-11-6'],
time_clip: bool
Time clip the variables to exactly the range specified in the trange keyword
+ last_version: bool
+ If True, only download the highest-numbered file version
+
Returns
----------
List of tplot variables created.
@@ -333,7 +346,7 @@ def spe(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='spe', trange=trange, datatype=datatype, level=level,
suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames,
- downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+ downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, last_version=last_version)
def spi(trange=['2018-11-5', '2018-11-6'],
datatype='sf00_l3_mom',
@@ -347,7 +360,8 @@ def spi(trange=['2018-11-5', '2018-11-6'],
no_update=False,
time_clip=False,
username=None,
- password=None
+ password=None,
+ last_version=False
):
"""
This function loads Parker Solar Probe SWEAP/SPAN-i data
@@ -406,6 +420,9 @@ def spi(trange=['2018-11-5', '2018-11-6'],
password: str
Password to use for authentication
+ last_version: bool
+ If True, only download the highest-numbered file version
+
Returns
----------
List of tplot variables created.
@@ -419,7 +436,7 @@ def spi(trange=['2018-11-5', '2018-11-6'],
return load(instrument='spi', trange=trange, datatype=datatype, level=level,
suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames,
downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update,
- username=username, password=password
+ username=username, password=password, last_version=last_version
)
def epihi(trange=['2018-11-5', '2018-11-6'],
@@ -432,7 +449,8 @@ def epihi(trange=['2018-11-5', '2018-11-6'],
downloadonly=False,
notplot=False,
no_update=False,
- time_clip=False):
+ time_clip=False,
+ last_version=False):
"""
This function loads Parker Solar Probe ISoIS/EPI-Hi data
@@ -476,6 +494,9 @@ def epihi(trange=['2018-11-5', '2018-11-6'],
time_clip: bool
Time clip the variables to exactly the range specified in the trange keyword
+ last_version: bool
+ If True, only download the highest-numbered file version
+
Returns
----------
List of tplot variables created.
@@ -483,7 +504,7 @@ def epihi(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='epihi', trange=trange, datatype=datatype, level=level,
suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames,
- downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+ downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, last_version=last_version)
def epilo(trange=['2018-11-5', '2018-11-6'],
datatype='pe',
@@ -495,7 +516,8 @@ def epilo(trange=['2018-11-5', '2018-11-6'],
downloadonly=False,
notplot=False,
no_update=False,
- time_clip=False):
+ time_clip=False,
+ last_version=False):
"""
This function loads Parker Solar Probe ISoIS/EPI-Lo data
@@ -539,6 +561,9 @@ def epilo(trange=['2018-11-5', '2018-11-6'],
time_clip: bool
Time clip the variables to exactly the range specified in the trange keyword
+ last_version: bool
+ If True, only download the highest-numbered file version
+
Returns
----------
List of tplot variables created.
@@ -546,7 +571,7 @@ def epilo(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='epilo', trange=trange, datatype=datatype, level=level,
suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames,
- downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+ downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, last_version=last_version)
def epi(trange=['2018-11-5', '2018-11-6'],
datatype='summary',
@@ -558,7 +583,8 @@ def epi(trange=['2018-11-5', '2018-11-6'],
downloadonly=False,
notplot=False,
no_update=False,
- time_clip=False):
+ time_clip=False,
+ last_version=False):
"""
This function loads Parker Solar Probe ISoIS/EPI (merged summary) data
@@ -602,6 +628,9 @@ def epi(trange=['2018-11-5', '2018-11-6'],
time_clip: bool
Time clip the variables to exactly the range specified in the trange keyword
+ last_version: bool
+ If True, only download the highest-numbered file version
+
Returns
----------
List of tplot variables created.
@@ -609,5 +638,5 @@ def epi(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='epi', trange=trange, datatype=datatype, level=level,
suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames,
- downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+ downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, last_version=last_version)
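
A representative call with the new keyword; with last_version=True, only the highest-numbered file matching each psp_fld_..._v??.cdf pattern is downloaded:

    import pyspedas

    fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-6'],
                                      datatype='mag_rtn', last_version=True)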
diff --git a/pyspedas/psp/load.py b/pyspedas/psp/load.py
index 03f7e178..257844f9 100644
--- a/pyspedas/psp/load.py
+++ b/pyspedas/psp/load.py
@@ -1,17 +1,17 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
-
+from .rfs import rfs_variables_to_load
from .config import CONFIG
-def load(trange=['2018-11-5', '2018-11-6'],
- instrument='fields',
- datatype='mag_RTN',
- spec_types=None, # for DFB AC spectral data
+def load(trange=['2018-11-5', '2018-11-6'],
+ instrument='fields',
+ datatype='mag_RTN',
+ spec_types=None, # for DFB AC spectral data
level='l2',
- suffix='',
- get_support_data=False,
+ suffix='',
+ get_support_data=False,
varformat=None,
varnames=[],
downloadonly=False,
@@ -19,8 +19,8 @@ def load(trange=['2018-11-5', '2018-11-6'],
no_update=False,
time_clip=False,
username=None,
- password=None
- ):
+ password=None,
+ last_version=False):
"""
This function loads Parker Solar Probe data into tplot variables; this function is not
meant to be called directly; instead, see the wrappers:
@@ -33,7 +33,6 @@ def load(trange=['2018-11-5', '2018-11-6'],
psp.epi ISoIS/EPI (merged Hi-Lo) data
"""
-
# remote path formats generally are going to be all lowercase except for
# on the Berkeley FIELDS server
if (username is not None) and (datatype in ['mag_RTN_1min',
@@ -58,7 +57,7 @@ def load(trange=['2018-11-5', '2018-11-6'],
if datatype in ['mag_rtn', 'mag_sc']:
pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v??.cdf'
file_resolution = 6*3600.
- elif datatype in ['mag_rtn_1min', 'mag_sc_1min', 'rfs_hfr', 'rfs_lfr', 'rfs_burst', 'f2_100bps']:
+ elif datatype in ['mag_rtn_1min', 'mag_sc_1min', 'rfs_hfr', 'rfs_lfr', 'rfs_burst', 'f2_100bps', 'aeb']:
pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d_v??.cdf'
elif datatype in ['mag_rtn_4_per_cycle', 'mag_rtn_4_sa_per_cyc']:
pathformat = instrument + '/' + level + '/mag_rtn_4_per_cycle/%Y/psp_fld_' + level + '_mag_rtn_4_sa_per_cyc_%Y%m%d_v??.cdf'
@@ -71,7 +70,7 @@ def load(trange=['2018-11-5', '2018-11-6'],
for item in spec_types:
loaded_data = load(trange=trange, instrument=instrument, datatype=datatype + '_' + item, level=level,
suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames,
- downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+ downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, last_version=last_version)
if loaded_data != []:
out_vars.extend(loaded_data)
return out_vars
@@ -95,14 +94,14 @@ def load(trange=['2018-11-5', '2018-11-6'],
else:
pathformat = instrument + '/' + level + '/' + datatype + '/%Y/%m/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v??.cdf'
- # unpublished data (only download v02 mag data which would be published)
+ # unpublished data
elif username != None:
if datatype in ['mag_RTN', 'mag_SC']:
- pathformat = instrument + '/' + level + '/' + datatype + '/%Y/%m/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v02.cdf'
+ pathformat = instrument + '/' + level + '/' + datatype + '/%Y/%m/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v??.cdf'
file_resolution = 6*3600.
elif datatype in ['mag_RTN_1min', 'mag_RTN_4_Sa_per_Cyc', 'mag_SC_1min', 'mag_SC_4_Sa_per_Cyc']:
- pathformat = instrument + '/' + level + '/' + datatype + '/%Y/%m/psp_fld_' + level + '_' + datatype + '_%Y%m%d_v02.cdf'
+ pathformat = instrument + '/' + level + '/' + datatype + '/%Y/%m/psp_fld_' + level + '_' + datatype + '_%Y%m%d_v??.cdf'
elif datatype == 'sqtn_rfs_V1V2':
pathformat = instrument + '/' + level + '/' + datatype + '/%Y/%m/psp_fld_' + level + '_' + datatype + '_%Y%m%d_v?.?.cdf'
@@ -115,6 +114,10 @@ def load(trange=['2018-11-5', '2018-11-6'],
pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v??.cdf'
file_resolution = 6*3600.
+ # Files on Berkeley server are stored in monthly directories
+ if username != None:
+ pathformat = pathformat.replace('/%Y/psp_fld', '/%Y/%m/psp_fld')
+
elif instrument == 'spc':
prefix = 'psp_spc_'
if username is None:
@@ -150,7 +153,7 @@ def load(trange=['2018-11-5', '2018-11-6'],
if username is None:
files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'],
- local_path=CONFIG['local_data_dir'], no_download=no_update)
+ local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=last_version)
else:
if instrument == 'fields':
try:
@@ -158,22 +161,22 @@ def load(trange=['2018-11-5', '2018-11-6'],
files = download(
remote_file=remote_names, remote_path=CONFIG['fields_remote_data_dir'],
local_path=CONFIG['local_data_dir'], no_download=no_update,
- username=username, password=password, basic_auth=True
+ username=username, password=password, basic_auth=True, last_version=last_version
)
except:
files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'],
- local_path=CONFIG['local_data_dir'], no_download=no_update)
+ local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=last_version)
elif instrument in ['spc','spi']:
try:
print("Downloading unpublished SWEAP Data....")
files = download(
remote_file=remote_names, remote_path=CONFIG['sweap_remote_data_dir'],
local_path=CONFIG['local_data_dir'], no_download=no_update,
- username=username, password=password, basic_auth=True
+ username=username, password=password, basic_auth=True, last_version=last_version
)
except:
files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'],
- local_path=CONFIG['local_data_dir'], no_download=no_update)
+ local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=last_version)
if files is not None:
@@ -185,6 +188,14 @@ def load(trange=['2018-11-5', '2018-11-6'],
if downloadonly:
return out_files
+ # find the list of varnames for RFS data
+ # these files have > 1500 variables, but
+ # we only load ~50
+ if 'rfs' in datatype.lower() and varformat is None and varnames == []:
+ varnames = rfs_variables_to_load(out_files)
+ # we'll need the support data for the quality flags
+ get_support_data = True
+
tvars = cdf_to_tplot(out_files, suffix=suffix, prefix=prefix, get_support_data=get_support_data,
varformat=varformat, varnames=varnames, notplot=notplot)
diff --git a/pyspedas/psp/rfs.py b/pyspedas/psp/rfs.py
new file mode 100644
index 00000000..f4dc645e
--- /dev/null
+++ b/pyspedas/psp/rfs.py
@@ -0,0 +1,30 @@
+import cdflib
+
+
+def rfs_variables_to_load(files):
+ """
+ This function finds a list of variables to load
+ from the RFS files (essentially the same behavior
+ as the IDL code).
+ """
+ out = []
+ if len(files) == 0:
+ return []
+ # the variables should be the same across all files
+ file = files[0]
+ cdf_file = cdflib.CDF(file)
+ cdf_info = cdf_file.cdf_info()
+ variables = cdf_info['rVariables'] + cdf_info['zVariables']
+ for variable in variables:
+        if not variable.startswith('psp_fld'):
+ continue
+ try:
+ elements = cdf_file.varget(variable)
+ except ValueError:
+ continue
+ if elements is None:
+ continue
+ if variable in out:
+ continue
+ out.append(variable)
+ return out
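+
+
+# Usage sketch (the file name below is hypothetical; pyspedas.psp.load
+# normally collects the local RFS CDFs and calls this function itself):
+#
+#   rfs_vars = rfs_variables_to_load(['psp_fld_l2_rfs_lfr_20181105_v02.cdf'])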
diff --git a/pyspedas/psp/tests/tests.py b/pyspedas/psp/tests/tests.py
index c8baba48..047487cb 100644
--- a/pyspedas/psp/tests/tests.py
+++ b/pyspedas/psp/tests/tests.py
@@ -1,38 +1,123 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
+ def test_unpublished_data(self):
+ """
+        This test doesn't load any data, since the username/password are invalid.
+ """
+ # no password
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_RTN', username='hello')
+ # invalid password
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_RTN', username='hello', password='world')
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_SC', username='hello', password='world')
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_SC_1min', username='hello', password='world')
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_RTN_1min', username='hello', password='world')
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_RTN_4_Sa_per_Cyc', username='hello', password='world')
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_SC_4_Sa_per_Cyc', username='hello', password='world')
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='sqtn_rfs_V1V2', username='hello', password='world')
+ spc = pyspedas.psp.spc(trange=['2018-11-5', '2018-11-5/06:00'], username='hello', password='world')
+ spi = pyspedas.psp.spi(trange=['2018-11-5', '2018-11-5/06:00'], username='hello', password='world')
+
+ def test_load_dfb_dbm_dvac(self):
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='dfb_dbm_dvac', level='l2')
+ self.assertTrue(data_exists('psp_fld_l2_dfb_dbm_dvac12'))
+
def test_load_fld_data(self):
fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_rtn', level='l2', time_clip=True)
self.assertTrue(data_exists('psp_fld_l2_mag_RTN'))
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_mag_RTN', [4, 16])
+ self.assertTrue(data_exists('psp_fld_l2_mag_RTN_004016'))
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_mag_RTN', 0)
+ self.assertTrue(data_exists('psp_fld_l2_mag_RTN_000'))
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_mag_RTN', [4, 16], keep=True)
+
+ def test_load_fld_1min(self):
+ fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_rtn_1min', level='l2')
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_mag_RTN_1min', [4, 16])
+ self.assertTrue(data_exists('psp_fld_l2_mag_RTN_1min'))
+ self.assertTrue(data_exists('psp_fld_l2_quality_flags'))
+ notplot = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_rtn_1min', level='l2', notplot=True)
+ self.assertTrue('psp_fld_l2_mag_RTN_1min' in notplot.keys())
+
+ def test_load_fld_rtn_4_per_cyc(self):
+ fields = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_rtn_4_per_cycle', level='l2')
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_mag_RTN_4_Sa_per_Cyc', [4, 16])
+ self.assertTrue(data_exists('psp_fld_l2_mag_RTN_4_Sa_per_Cyc'))
+ self.assertTrue(data_exists('psp_fld_l2_quality_flags'))
+
+ def test_load_fld_sc_4_per_cyc(self):
+ fields = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='mag_sc_4_per_cycle',
+ level='l2')
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_mag_SC_4_Sa_per_Cyc', [4, 16])
+ self.assertTrue(data_exists('psp_fld_l2_mag_SC_4_Sa_per_Cyc'))
+ self.assertTrue(data_exists('psp_fld_l2_quality_flags'))
+
+ def test_load_sqtn_rfs_v1v2(self):
+ fields = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='sqtn_rfs_v1v2')
+ filtered = pyspedas.psp.filter_fields('electron_density', [4, 16])
+ self.assertTrue(data_exists('electron_density'))
+ self.assertTrue(data_exists('electron_core_temperature'))
+
+ def test_load_dfb_dc_spec(self):
+ fields = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='dfb_dc_spec')
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_dfb_dc_spec_dV12hg', [4, 16])
+ self.assertTrue(data_exists('psp_fld_l2_dfb_dc_spec_dV12hg'))
+ self.assertTrue(data_exists('psp_fld_l2_dfb_dc_spec_SCMdlfhg'))
+
+ def test_load_dfb_ac_xspec(self):
+ fields = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-5/06:00'], datatype='dfb_ac_xspec')
+ filtered = pyspedas.psp.filter_fields('psp_fld_l2_dfb_ac_xspec_power_ch1_SCMdlfhg', [4, 16])
+        self.assertTrue(data_exists('psp_fld_l2_dfb_ac_xspec_power_ch1_SCMdlfhg'))
def test_load_spc_data(self):
spc_vars = pyspedas.psp.spc(trange=['2018-11-5', '2018-11-6'], datatype='l3i', level='l3')
- self.assertTrue(data_exists('np_fit'))
+ self.assertTrue(data_exists('psp_spc_np_fit'))
+ self.assertTrue(data_exists('psp_spc_np_fit_uncertainty'))
+ self.assertTrue(data_exists('psp_spc_wp_fit'))
+ self.assertTrue(data_exists('psp_spc_vp_fit_SC'))
+ self.assertTrue(data_exists('psp_spc_vp_fit_RTN'))
+ self.assertTrue(data_exists('psp_spc_np1_fit'))
def test_load_spe_data(self):
spe_vars = pyspedas.psp.spe(trange=['2018-11-5', '2018-11-6'], datatype='spa_sf1_32e', level='l2')
- self.assertTrue(data_exists('EFLUX'))
+ self.assertTrue(data_exists('psp_spe_EFLUX'))
+ self.assertTrue(data_exists('psp_spe_QUALITY_FLAG'))
def test_load_spi_data(self):
spi_vars = pyspedas.psp.spi(trange=['2018-11-5', '2018-11-6'], datatype='spi_sf0a_mom_inst', level='l3')
- self.assertTrue(data_exists('DENS'))
+ self.assertTrue(data_exists('psp_spi_DENS'))
+ self.assertTrue(data_exists('psp_spi_VEL'))
+ self.assertTrue(data_exists('psp_spi_T_TENSOR'))
+ self.assertTrue(data_exists('psp_spi_TEMP'))
+ self.assertTrue(data_exists('psp_spi_EFLUX_VS_ENERGY'))
+ self.assertTrue(data_exists('psp_spi_EFLUX_VS_THETA'))
+ self.assertTrue(data_exists('psp_spi_EFLUX_VS_PHI'))
def test_load_epihi_data(self):
epihi_vars = pyspedas.psp.epihi(trange=['2018-11-5', '2018-11-5/06:00'], datatype='let1_rates1h', level='l2')
- self.assertTrue(data_exists('B_He_Rate'))
+ self.assertTrue(data_exists('psp_epihi_B_He_Rate'))
+ self.assertTrue(data_exists('psp_epihi_R1A_He_BIN'))
+ self.assertTrue(data_exists('psp_epihi_R3B_He_BIN'))
+ self.assertTrue(data_exists('psp_epihi_R6A_He_BIN'))
def test_load_epi_data(self):
epilo_vars = pyspedas.psp.epi()
- self.assertTrue(data_exists('HET_A_Electrons_Rate_TS'))
+ self.assertTrue(data_exists('psp_isois_HET_A_Electrons_Rate_TS'))
+ self.assertTrue(data_exists('psp_isois_HET_A_H_Rate_TS'))
+ self.assertTrue(data_exists('psp_isois_A_H_Rate_TS'))
+ self.assertTrue(data_exists('psp_isois_A_Heavy_Rate_TS'))
+ self.assertTrue(data_exists('psp_isois_H_CountRate_ChanP_SP'))
+ self.assertTrue(data_exists('psp_isois_Electron_CountRate_ChanE'))
def test_downloadonly(self):
files = pyspedas.psp.epilo(downloadonly=True)
self.assertTrue(os.path.exists(files[0]))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/rbsp/README.md b/pyspedas/rbsp/README.md
index df96a7c2..b96ec67e 100644
--- a/pyspedas/rbsp/README.md
+++ b/pyspedas/rbsp/README.md
@@ -36,9 +36,15 @@ tplot(['efield_in_inertial_frame_spinfit_mgse', 'spacecraft_potential'])
#### Radiation Belt Storm Probes Ion Composition Experiment (RBSPICE)
```python
-rbspice_vars = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='tofxeh', level='l3')
+rbspice_vars = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='TOFxEH', level='l3')
-tplot('Alpha')
+tplot('rbspa_rbspice_l3_TOFxEH_proton_omni_spin')
+
+# calculate the pitch angle distributions
+from pyspedas.rbsp.rbspice_lib.rbsp_rbspice_pad import rbsp_rbspice_pad
+rbsp_rbspice_pad(probe='a', datatype='TOFxEH', level='l3')
+
+tplot('rbspa_rbspice_l3_TOFxEH_proton_omni_0-1000keV_pad_spin')
```
#### Energetic Particle, Composition, and Thermal Plasma Suite (ECT)
@@ -54,7 +60,6 @@ tplot('Ion_density')
rept_vars = pyspedas.rbsp.rept(trange=['2018-11-5', '2018-11-6'], level='l3', rel='rel03')
-tplot('Tperp_e_200')
```
#### Relativistic Proton Spectrometer (RPS)
diff --git a/pyspedas/rbsp/__init__.py b/pyspedas/rbsp/__init__.py
index 2276bb16..bd485fd7 100644
--- a/pyspedas/rbsp/__init__.py
+++ b/pyspedas/rbsp/__init__.py
@@ -1,5 +1,9 @@
-
from .load import load
+from pyspedas.rbsp.rbspice_lib.rbsp_load_rbspice_read import rbsp_load_rbspice_read
+from pyspedas.rbsp.rbspice_lib.rbsp_rbspice_omni import rbsp_rbspice_omni
+from pyspedas.rbsp.rbspice_lib.rbsp_rbspice_spin_avg import rbsp_rbspice_spin_avg
+from pyspedas.utilities.datasets import find_datasets
+
def emfisis(trange=['2018-11-5', '2018-11-6'],
probe='a',
@@ -120,12 +124,13 @@ def emfisis(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='emfisis', wavetype=wavetype, trange=trange, probe=probe, datatype=datatype, level=level, cadence=cadence, coord=coord, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def rbspice(trange=['2018-11-5', '2018-11-6'],
probe='a',
- datatype='tofxeh',
+ datatype='TOFxEH',
level='l3',
suffix='',
- get_support_data=False,
+ get_support_data=True,
varformat=None,
varnames=[],
downloadonly=False,
@@ -183,7 +188,44 @@ def rbspice(trange=['2018-11-5', '2018-11-6'],
List of tplot variables created.
"""
- return load(instrument='rbspice', trange=trange, probe=probe, datatype=datatype, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ # Valid names
+ vprobe = ['a', 'b']
+ vlevels = ['l1', 'l2', 'l3', 'l4']
+ vdatatypesl1 = ['TOFxEH', 'TOFxEnonH', 'TOFxPHHHELT']
+ vdatatypesl2 = ['TOFxEH', 'TOFxEnonH', 'TOFxPHHHELT']
+ vdatatypesl3 = ['TOFxEH', 'TOFxEnonH', 'TOFxPHHHELT']
+ vdatatypesl3pap = [''] # L3PAP data is not yet supported
+ vdatatypesl4 = [''] # L4 data is not yet supported
+ vdatatypes = vdatatypesl1 + vdatatypesl2 + vdatatypesl3 + vdatatypesl3pap + vdatatypesl4
+ vdatatypes_lower = [vdatatype.lower() for vdatatype in vdatatypes]
+
+ tvars = load(instrument='rbspice', trange=trange, probe=probe, datatype=datatype, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ if not isinstance(probe, list):
+ probe = [probe]
+
+ if datatype.lower() in vdatatypes_lower:
+ for prb in probe:
+ # Add energy channel energy values to primary data variable,
+ # create variables for individual telescopes, and set appropriate tplot options
+ rbsp_load_rbspice_read(level=level, probe=prb, datatype=datatype)
+
+ # Calculate omni-directional variable
+ omni_vars = rbsp_rbspice_omni(probe=prb, datatype=datatype, level=level)
+ if omni_vars:
+ tvars.extend(omni_vars)
+
+ # Calculate spin-averaged variable
+ sp_avg_vars = rbsp_rbspice_spin_avg(probe=prb, datatype=datatype, level=level)
+            if sp_avg_vars:
+ tvars.extend(sp_avg_vars)
+
+ return tvars
+
def efw(trange=['2015-11-5', '2015-11-6'],
probe='a',
@@ -250,6 +292,7 @@ def efw(trange=['2015-11-5', '2015-11-6'],
"""
return load(instrument='efw', trange=trange, probe=probe, datatype=datatype, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def mageis(trange=['2015-11-5', '2015-11-6'],
probe='a',
datatype='',
@@ -316,6 +359,7 @@ def mageis(trange=['2015-11-5', '2015-11-6'],
"""
return load(instrument='mageis', rel=rel, trange=trange, probe=probe, datatype=datatype, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def hope(trange=['2015-11-5', '2015-11-6'],
probe='a',
datatype='moments',
@@ -382,6 +426,7 @@ def hope(trange=['2015-11-5', '2015-11-6'],
"""
return load(instrument='hope', rel=rel, trange=trange, probe=probe, datatype=datatype, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def rept(trange=['2015-11-5', '2015-11-6'],
probe='a',
datatype='',
@@ -448,6 +493,7 @@ def rept(trange=['2015-11-5', '2015-11-6'],
"""
return load(instrument='rept', rel=rel, trange=trange, probe=probe, datatype=datatype, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def rps(trange=['2015-11-5', '2015-11-6'],
probe='a',
datatype='rps-1min',
@@ -512,3 +558,7 @@ def rps(trange=['2015-11-5', '2015-11-6'],
"""
return load(instrument='rps', trange=trange, probe=probe, datatype=datatype, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Van Allen Probes (RBSP)', instrument=instrument, label=label)
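+
+# e.g. pyspedas.rbsp.datasets(instrument='rbspice') queries the CDAWeb
+# master list for the available RBSPICE dataset identifiers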
diff --git a/pyspedas/rbsp/load.py b/pyspedas/rbsp/load.py
index 541f43d0..8137ec4a 100644
--- a/pyspedas/rbsp/load.py
+++ b/pyspedas/rbsp/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
@@ -38,8 +38,16 @@ def load(trange=['2018-11-5', '2018-11-6'],
if not isinstance(probe, list):
probe = [probe]
+ datatype_in = datatype
+ datatype = datatype.lower()
+ prefix = ''
out_files = []
+ if notplot:
+ tvars = {}
+ else:
+ tvars = []
+
for prb in probe:
if instrument == 'emfisis':
if datatype == 'density' or datatype == 'housekeeping' or datatype == 'wna-survey':
@@ -53,6 +61,7 @@ def load(trange=['2018-11-5', '2018-11-6'],
pathformat = 'rbsp'+prb+'/'+level+'/'+instrument+'/'+datatype+'/'+cadence+'/'+coord+'/%Y/rbsp-'+prb+'_'+datatype+'_'+cadence+'-'+coord+'_'+instrument+'-'+level+'_%Y%m%d_v*.cdf'
elif instrument == 'rbspice':
pathformat = 'rbsp'+prb+'/'+level+'/'+instrument+'/'+datatype+'/%Y/rbsp-'+prb+'-'+instrument+'_lev-'+str(level[-1])+'?'+datatype+'_%Y%m%d_v*.cdf'
+ prefix = 'rbsp'+prb+'_rbspice_'+level+'_'+datatype_in+'_'
elif instrument == 'efw':
if level == 'l3':
pathformat = 'rbsp'+prb+'/'+level+'/'+instrument+'/%Y/rbsp'+prb+'_'+instrument+'-'+level+'_%Y%m%d_v??.cdf'
@@ -75,7 +84,6 @@ def load(trange=['2018-11-5', '2018-11-6'],
elif datatype == 'rps':
pathformat = 'rbsp'+prb+'/'+level+'/rps/psbr-rps/%Y/rbsp'+prb+'_'+level+'_psbr-rps_%Y%m%d_v*.cdf'
-
# find the full remote path names using the trange
remote_names = dailynames(file_format=pathformat, trange=trange)
@@ -84,13 +92,18 @@ def load(trange=['2018-11-5', '2018-11-6'],
for file in files:
out_files.append(file)
- out_files = sorted(out_files)
+ if not downloadonly:
+ tvars_o = cdf_to_tplot(sorted(out_files), prefix=prefix, suffix=suffix, get_support_data=get_support_data,
+ varformat=varformat, varnames=varnames, notplot=notplot)
+
+ if notplot:
+ tvars = dict(tvars, **tvars_o)
+ else:
+ tvars.extend(tvars_o)
if downloadonly:
- return out_files
+ return sorted(out_files)
- tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
-
if notplot:
return tvars
diff --git a/pyspedas/rbsp/rbspice_lib/__init__.py b/pyspedas/rbsp/rbspice_lib/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/rbsp/rbspice_lib/rbsp_load_rbspice_read.py b/pyspedas/rbsp/rbspice_lib/rbsp_load_rbspice_read.py
new file mode 100644
index 00000000..a6953b6b
--- /dev/null
+++ b/pyspedas/rbsp/rbspice_lib/rbsp_load_rbspice_read.py
@@ -0,0 +1,87 @@
+import logging
+from pytplot import get_data, store_data, options
+from pyspedas import tnames
+
+
+def rbsp_load_rbspice_read(level='l3', probe='a', datatype='TOFxEH'):
+ """
+ Works on previously loaded RBSPICE tplot variables: adds energy channel energy values to primary data variable, separates non-H species variables,
+ creates variables for individual telescopes, and sets appropriate tplot options
+
+ Parameters
+ ----------
+ level : str
+ data level ['l1','l2','l3' (default),'l3pap']
+ probe : str
+ RBSP spacecraft indicator [Options: 'a' (default), 'b']
+ datatype : str
+ RBSPICE data type ['EBR','ESRHELT','ESRLEHT','IBR','ISBR','ISRHELT','TOFxEH' (default),'TOFxEIon','TOFxEnonH','TOFxPHHHELT','TOFxPHHLEHT'],
+        though valid options vary by data level.
+ """
+ if level != 'l1':
+ units_label = '1/(cm^2-sr-s-keV)'
+ convert_factor = 1000. # to convert flux from 1/MeV to 1/keV
+ else:
+ units_label = 'counts/s'
+ convert_factor = 1. # do not need to convert counts/s
+
+ prefix = 'rbsp'+probe+'_rbspice_'+level+'_'+datatype+'_'
+ # find the flux/cps data name(s)
+ data_var = tnames(prefix + 'F*DU')
+ energy_var = tnames(prefix + 'F*DU_Energy')
+
+ logging.info('Correcting RBSPICE energy tables...')
+ for i in range(len(data_var)):
+ en_data = get_data(energy_var[i])
+ temp_energy = en_data.transpose()
+ temp = get_data(data_var[i])
+ data = temp.y.transpose([0, 2, 1])
+ species_str = data_var[i][-4:-2]
+ if species_str == 'FP':
+ species='proton'
+ yticks = 1
+ if datatype != 'TOFxPHHHELT':
+ new_energy = temp_energy[:,0] * 1000. # convert energy from MeV to keV
+ new_flux = data / convert_factor # convert flux from 1/MeV to 1/keV
+ zrange = [5.,1.e5]
+ else:
+ new_energy = temp_energy[11:-1,0] * 1000. # convert energy from MeV to keV
+                new_flux = data[:,11:-1,:] / convert_factor # convert flux from 1/MeV to 1/keV
+ zrange = [2.e2,1.e6]
+ elif species_str == 'He':
+ species='helium'
+ yticks = 1
+ new_energy = temp_energy[0:10,0] * 1000. # convert energy from MeV to keV
+ new_flux = data[:,0:10,:] / convert_factor # convert flux from 1/MeV to 1/keV
+ zrange = [1.,1.e3]
+ elif species_str == 'FO':
+ species='oxygen'
+ yticks = 2
+ if datatype != 'TOFxPHHHELT':
+ new_energy = temp_energy[11:18,0] * 1000. # convert energy from MeV to keV
+ new_flux = data[:,11:18,:] / convert_factor # convert flux from 1/MeV to 1/keV
+ zrange = [1.,1.e2]
+ else:
+ new_energy = temp_energy[0:10,0] * 1000. # convert energy from MeV to keV
+ new_flux = data[:,0:10,:] / convert_factor # convert flux from 1/MeV to 1/keV
+ zrange = [1e1,1.e4]
+ new_name = prefix+species
+ # note: can't save the energy table here, due to the shape of new_flux
+ # so we'll have to grab the energy table from the individual telescope
+ # variables
+ store_data(new_name, data={'x':temp.times, 'y':new_flux})
+ options(new_name, 'ylog', True)
+ options(new_name, 'zlog', True)
+ options(new_name, 'zrange', zrange)
+ options(new_name, 'ytitle', 'rbsp'+probe+'_rbspice_'+species)
+ options(new_name, 'ysubtitle', 'Energy [keV]')
+ options(new_name, 'ztitle', units_label)
+ for j in range(6):
+ store_data(new_name+'_T'+str(j), data={'x':temp.times, 'y':new_flux[:,:,j], 'v':new_energy})
+ options(new_name+'_T'+str(j), 'spec', True)
+ options(new_name+'_T'+str(j), 'ylog', True)
+ options(new_name+'_T'+str(j), 'zlog', True)
+ options(new_name+'_T'+str(j), 'zrange', zrange)
+ options(new_name+'_T'+str(j), 'ytitle', 'rbsp'+probe+'_rbspice_'+species+'_T'+str(j))
+ options(new_name+'_T'+str(j), 'ysubtitle', '[keV]')
+ options(new_name+'_T'+str(j), 'ztitle', units_label)
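+
+
+# Usage sketch (assumes RBSPICE TOFxEH data were already loaded with
+# support data, e.g. via pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'])):
+#
+#   rbsp_load_rbspice_read(level='l3', probe='a', datatype='TOFxEH')
+#
+# creates rbspa_rbspice_l3_TOFxEH_proton plus the per-telescope
+# variables rbspa_rbspice_l3_TOFxEH_proton_T0 ... _T5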
diff --git a/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_omni.py b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_omni.py
new file mode 100644
index 00000000..497e17ae
--- /dev/null
+++ b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_omni.py
@@ -0,0 +1,93 @@
+import logging
+import numpy as np
+from pytplot import get_data, store_data, options
+from pyspedas import tnames
+
+# use nanmean from bottleneck if it's installed, otherwise use the numpy one
+# bottleneck nanmean is ~2.5x faster
+try:
+ import bottleneck as bn
+ nanmean = bn.nanmean
+except ImportError:
+ nanmean = np.nanmean
+
+
+def rbsp_rbspice_omni(probe='a', datatype='TOFxEH', level='l3'):
+ """
+ Calculates the omni-directional flux for all 6 telescopes
+
+ Parameters
+ ----------
+ probe : str
+ RBSP spacecraft indicator [Options: 'a' (default), 'b']
+ datatype : str
+ RBSPICE data type ['EBR','ESRHELT','ESRLEHT','IBR','ISBR','ISRHELT','TOFxEH' (default),'TOFxEIon','TOFxEnonH','TOFxPHHHELT','TOFxPHHLEHT'],
+        though valid options vary by data level.
+ level : str
+ data level ['l1','l2','l3' (default),'l3pap']
+
+ Returns
+ -------
+ Tplot variables created
+ """
+ if probe is None:
+ probe = 'a'
+ if datatype is None:
+ datatype = 'TOFxEH'
+ if level is None:
+ level = 'l3'
+ if level != 'l1':
+ units_label = '1/(cm^2-sr-s-keV)'
+ else:
+ units_label = 'counts/s'
+
+ prefix = 'rbsp'+probe+'_rbspice_'+level+'_'+datatype+'_'
+
+ # find the flux/cps data name(s)
+ data_var = tnames(prefix + 'F*DU')
+
+ if not data_var:
+ logging.error('Error, problem finding the RBSPICE data to calculate omni-directional spectrograms')
+ return
+
+    logging.info('Calculating omni-directional energy spectra; this might take a few minutes...')
+ out = []
+
+ for i in range(len(data_var)):
+ species_str = data_var[i][-4:-2]
+ if species_str == 'FP':
+ species='proton'
+ if datatype != 'TOFxPHHHELT':
+ zrange = [5., 1.e5]
+ else:
+ zrange = [2.e2, 1.e6]
+ elif species_str == 'He':
+ species = 'helium'
+ zrange = [1., 5.e2]
+ elif species_str == 'FO':
+ species = 'oxygen'
+ if datatype != 'TOFxPHHHELT':
+ zrange = [1., 1.e2]
+ else:
+ zrange = [1e1, 1.e4]
+
+ # load the flux/cps data
+ d = get_data(prefix+species)
+ d_for_en_table = get_data(prefix+species+'_T0')
+
+ if d is not None:
+ flux_omni = np.zeros((len(d.times),len(d.y[0, :, 0])))
+ for k in range(len(d.times)):
+ for l in range(len(d.y[0, :, 0])):
+ flux_omni[k, l] = nanmean(d.y[k, l, :])
+ newname = prefix+species+'_omni'
+ store_data(newname, data={'x': d.times, 'y': flux_omni, 'v': d_for_en_table.v})
+ options(newname, 'ylog', True)
+ options(newname, 'zlog', True)
+ options(newname, 'spec', True)
+ options(newname, 'zrange', zrange)
+ options(newname, 'ytitle', 'rbsp-'+probe+'\nrbspice\n'+species+'\nomni')
+ options(newname, 'ysubtitle', '[keV]')
+ options(newname, 'ztitle', units_label)
+ out.append(newname)
+ return out
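+
+
+# Note: pyspedas.rbsp.rbspice() calls this automatically after loading;
+# a direct call is only useful once the species variables exist, e.g.:
+#
+#   omni_vars = rbsp_rbspice_omni(probe='a', datatype='TOFxEH', level='l3')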
diff --git a/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_pad.py b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_pad.py
new file mode 100644
index 00000000..527e4bce
--- /dev/null
+++ b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_pad.py
@@ -0,0 +1,172 @@
+import logging
+import numpy as np
+from pytplot import get_data, store_data, options
+from pyspedas.rbsp.rbspice_lib.rbsp_rbspice_pad_spinavg import rbsp_rbspice_pad_spinavg
+
+# use nanmean from bottleneck if it's installed, otherwise use the numpy one
+# bottleneck nanmean is ~2.5x faster
+try:
+ import bottleneck as bn
+ nanmean = bn.nanmean
+except ImportError:
+ nanmean = np.nanmean
+
+
+def rbsp_rbspice_pad(probe='a', datatype='TOFxEH', level='l3', energy=[0, 1000], bin_size=15, scopes=None):
+ """
+ Calculate pitch angle distributions using data from the
+ RBSP Radiation Belt Storm Probes Ion Composition Experiment (RBSPICE)
+
+ Parameters
+ ----------
+ probe : str
+ RBSP spacecraft indicator [Options: 'a' (default), 'b']
+ datatype : str
+ desired data type [Options: 'TOFxEH' (default), 'TOFxEnonH']
+ level : str
+ data level ['l1','l2','l3' (default),'l3pap']
+ energy : list
+ user-defined energy range to include in the calculation in keV [default = [0,1000]]
+ bin_size : float
+ desired size of the pitch angle bins in degrees [default = 15]
+ scopes : list
+ string array of telescopes to be included in PAD [0-5, default is all]
+
+ Returns
+ -------
+ Tplot variables created
+ """
+ if datatype == 'TOFxEH':
+ species = 'proton'
+ elif datatype == 'TOFxEnonH':
+ species = ['helium', 'oxygen']
+ elif datatype == 'TOFxPHHHELT':
+ species = ['proton', 'oxygen']
+
+ if not isinstance(species, list):
+ species = [species]
+
+ if level != 'l1':
+ units_label = '1/(cm^2-sr-s-keV)'
+ else:
+ units_label = 'counts/s'
+ if not energy:
+ energy = [0, 1000]
+ if not bin_size:
+ bin_size = 15.
+ if not scopes:
+ scopes = [0, 1, 2, 3, 4, 5]
+
+ prefix = 'rbsp'+probe+'_rbspice_'+level+'_'+datatype+'_'
+
+ if energy[0] > energy[1]:
+ logging.error('Low energy must be given first, then high energy in "energy" keyword')
+ return
+
+ # set up the number of pa bins to create
+ bin_size = float(bin_size)
+ n_pabins = int(180./bin_size)
+ pa_bins = 180.*np.arange(n_pabins+1)/n_pabins
+ pa_label = 180.*np.arange(n_pabins)/n_pabins+bin_size/2.
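+    # e.g. with the default bin_size = 15: n_pabins = 12,
+    # pa_bins = [0., 15., ..., 180.] (bin edges) and
+    # pa_label = [7.5, 22.5, ..., 172.5] (bin centers)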
+
+ logging.info('Num PA bins: ' + str(n_pabins))
+ logging.info('PA bins: ' + str(pa_bins))
+
+ # check to make sure the data exist
+ d = get_data(prefix + 'Alpha')
+ if d is None:
+ logging.error('No '+datatype+' data is currently loaded for probe rbsp-'+probe+' for the selected time period')
+ return
+
+    logging.info('Calculating RBSPICE pitch angle distribution...')
+ out = []
+
+ for ion_type_idx in range(len(species)):
+ # get pitch angle data (all telescopes in single variable)
+ d_pa = get_data(prefix + 'Alpha')
+ pa_file = np.zeros((len(d_pa.times), len(scopes))) # time steps, look direction
+ for aa in range(len(scopes)):
+            pa_file[:, aa] = d_pa.y[:, scopes[aa]]  # index by position so subsets of telescopes work
+
+ pa_flux = np.zeros((len(d_pa.times), n_pabins, len(scopes)))
+ pa_flux_nans = np.argwhere(pa_flux == 0)
+ if len(pa_flux_nans) > 0:
+ pa_flux[pa_flux_nans] = np.nan
+ pa_num_in_bin = np.zeros((len(d_pa.times), n_pabins, len(scopes)))
+
+ for qq in range(len(species)):
+ # get flux data (all telescopes in single variable)
+ d_flux = get_data(prefix + species[qq])
+ d_flux_t0 = get_data(prefix + species[qq] + '_T0')
+
+ logging.info(prefix + species[qq])
+ flux_file = np.zeros((len(d_flux.times), len(scopes))) # time steps, look direction
+ flux_file_nans = np.argwhere(flux_file == 0)
+ if len(flux_file_nans) > 0:
+ flux_file[flux_file_nans] = np.nan
+ new_pa_flux = np.zeros((len(d_flux.times), n_pabins, len(scopes))) # the average for each bin
+
+ # get energy range of interest
+ e = d_flux_t0.v
+ indx = np.argwhere((e < energy[1]) & (e > energy[0]))
+
+ if len(indx) == 0:
+                logging.warning('Energy range selected is not covered by the detector for ' + datatype + ' ' + species[qq])
+ continue
+
+ for t in range(len(scopes)):
+ # Loop through each time step and get:
+ # 1. the total flux for the energy range of interest for each detector
+ # 2. flux in each pa bin
+ for i in range(len(d_flux.times)): # loop through time
+ flux_file[i, t] = np.nansum(d_flux.y[i, indx, scopes[t]]) # start with lowest energy
+ for j in range(n_pabins): # loop through pa bins
+ if (pa_file[i, t] > pa_bins[j]) and (pa_file[i,t] < pa_bins[j+1]):
+ if not np.isfinite(pa_flux[i, j, t]):
+ pa_flux[i, j, t] = flux_file[i, t]
+ else:
+ pa_flux[i, j, t] = pa_flux[i, j, t] + flux_file[i, t]
+ pa_num_in_bin[i, j, t] += 1.0
+
+ # loop over time
+ for i in range(len(pa_flux[:, 0, 0])):
+ # loop over bins
+ for bin_idx in range(len(pa_flux[i, :, 0])):
+ if pa_num_in_bin[i, bin_idx, t] != 0.0:
+ new_pa_flux[i, bin_idx, t] = pa_flux[i, bin_idx, t]/pa_num_in_bin[i, bin_idx, t]
+ else:
+ new_pa_flux[i, bin_idx, t] = np.nan
+
+ en_range_string = str(energy[0]) + '-' + str(energy[1]) + 'keV'
+ if len(scopes) == 6:
+ new_name = prefix+species[qq]+'_omni_'+en_range_string+'_pad'
+ new_omni_pa_flux = np.zeros((len(new_pa_flux[:, 0, 0]),len(new_pa_flux[0, :, 0])))
+ for ii in range(len(new_pa_flux[:, 0, 0])):
+ for jj in range(len(new_pa_flux[0, :, 0])):
+ new_omni_pa_flux[ii, jj] = nanmean(new_pa_flux[ii, jj, :])
+ store_data(new_name, data={'x': d_flux.times, 'y': new_omni_pa_flux, 'v': pa_label})
+ options(new_name, 'yrange', [0, 180])
+ options(new_name, 'spec', True)
+ options(new_name, 'zlog', True)
+                options(new_name, 'ytitle', 'rbsp-'+probe+'\nrbspice\n'+species[qq]+'\nomni')
+ options(new_name, 'ysubtitle', en_range_string+'\nPA [Deg]')
+ options(new_name, 'ztitle', units_label)
+ out.append(new_name)
+ else:
+ new_name = []
+ for ii in range(len(scopes)):
+ new_name.append(prefix+species[qq]+'_T'+str(scopes[ii])+'_'+en_range_string+'_pad')
+ store_data(new_name[ii], data={'x': d_flux.times, 'y': new_pa_flux[:, :, ii], 'v': pa_label})
+ options(new_name[ii], 'yrange', [0, 180])
+ options(new_name[ii], 'spec', True)
+ options(new_name[ii], 'zlog', True)
+                    options(new_name[ii], 'ytitle', 'rbsp-'+probe+'\nrbspice\n'+species[qq]+'\nT'+str(scopes[ii]))
+ options(new_name[ii], 'ysubtitle', en_range_string + '\nPA [Deg]')
+ options(new_name[ii], 'ztitle', units_label)
+ out.append(new_name[ii])
+
+        # now do the spin average
+ sp_vars = rbsp_rbspice_pad_spinavg(probe=probe, datatype=datatype, species=species[ion_type_idx], energy=energy, bin_size=bin_size, scopes=scopes)
+ if sp_vars is not None:
+ out.extend(sp_vars)
+ return out
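+
+
+# Example (matches the README; the RBSPICE data must be loaded first):
+#
+#   pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='TOFxEH', level='l3')
+#   rbsp_rbspice_pad(probe='a', datatype='TOFxEH', level='l3', energy=[0, 1000], bin_size=15)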
diff --git a/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_pad_spinavg.py b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_pad_spinavg.py
new file mode 100644
index 00000000..bddcd4a1
--- /dev/null
+++ b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_pad_spinavg.py
@@ -0,0 +1,127 @@
+import logging
+import numpy as np
+import scipy
+from pytplot import get_data, store_data, options
+
+# use nanmean from bottleneck if it's installed, otherwise use the numpy one
+# bottleneck nanmean is ~2.5x faster
+try:
+ import bottleneck as bn
+ nanmean = bn.nanmean
+except ImportError:
+ nanmean = np.nanmean
+
+
+def rbsp_rbspice_pad_spinavg(probe='a', datatype='TOFxEH', level='l3', species=None, energy=[0, 1000], bin_size=15., scopes=None):
+ """
+ Calculates spin-averaged PADs for the RBSPICE instrument
+
+ Parameters
+ ----------
+ probe : str
+ RBSP spacecraft indicator [Options: 'a' (default), 'b']
+ datatype : str
+ desired data type [Options: 'TOFxEH' (default), 'TOFxEnonH']
+ level : str
+ data level ['l1','l2','l3' (default),'l3pap']
+ species : str
+ desired ion species [Options: 'proton' (default), 'helium', 'oxygen']
+ energy : list
+ user-defined energy range to include in the calculation in keV [default = [0,1000]]
+ bin_size : float
+ desired size of the pitch angle bins in degrees [default = 15]
+ scopes : list
+ string array of telescopes to be included in PAD [0-5, default is all]
+
+ Returns
+ --------
+ Tplot variables created
+ """
+ if level != 'l1':
+ units_label = '1/(cm^2-sr-s-keV)'
+ else:
+ units_label = 'counts/s'
+ if species is None and datatype == 'TOFxEH':
+ species = 'proton'
+ elif species is None and datatype == 'TOFxEnonH':
+ species = ['helium', 'oxygen']
+ elif species is None and datatype == 'TOFxPHHHELT':
+ species = ['proton', 'oxygen']
+ if energy is None:
+ energy = [0, 1000]
+ if bin_size is None:
+ bin_size = 15.
+ if scopes is None:
+ scopes = [0, 1, 2, 3, 4, 5]
+
+ en_range_string = str(energy[0]) + '-' + str(energy[1]) + 'keV'
+
+ prefix = 'rbsp'+probe+'_rbspice_'+level+'_'+datatype+'_'
+ spin_nums = get_data(prefix + 'Spin')
+
+ if spin_nums is None:
+ logging.error('Spin variable not found: ' + prefix + 'Spin')
+ return
+
+ # find where the spins start
+ spin_starts = np.unique(spin_nums.y, return_index=True)[1][1:]-1
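+    # np.unique(..., return_index=True)[1] holds the index of the first sample
+    # of each spin; [1:]-1 shifts that to the last sample of the preceding
+    # spin, e.g. spin numbers [5, 5, 5, 6, 6, 7] -> spin_starts = [2, 4]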
+ if len(scopes) == 6:
+ pad_name = [prefix+species+'_omni_'+en_range_string+'_pad']
+ else:
+ pad_name = [prefix+species+'_T'+str(i)+'_'+en_range_string+'_pad' for i in scopes]
+
+    logging.info('Calculating spin-averaged pitch angle distribution...')
+ out = []
+
+ for ii in range(len(pad_name)):
+ pad_data = get_data(pad_name[ii])
+
+ if pad_data is None:
+ logging.error('Error, variable containing valid PAD data missing.')
+ return
+
+ # the following is for rebinning and interpolating to new_bins
+ n_pabins = 180. / bin_size
+ new_bins = 180. * np.arange(n_pabins + 1) / n_pabins
+ srx = [float(len(pad_data.v)) / (int(n_pabins) + 1) * (x + 0.5) - 0.5 for x in range(int(n_pabins) + 1)]
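+        # srx holds the fractional source indices used to resample the
+        # bin-center PAD values onto the n_pabins+1 edge grid, mirroring
+        # the resampling behavior of IDL's congrid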
+
+ spin_sum_flux = np.zeros((len(spin_starts), len(pad_data.y[0, :])))
+ rebinned_data = np.zeros((len(spin_starts), len(pad_data.y[0, :])+1))
+ spin_times = np.zeros(len(spin_starts))
+
+ current_start = 0
+ # loop through the spins for this telescope
+ for spin_idx in range(len(spin_starts)):
+ # loop over energies
+ spin_sum_flux[spin_idx,:] = nanmean(pad_data.y[current_start:spin_starts[spin_idx]+1,:], axis=0)
+ spin_times[spin_idx] = pad_data.times[current_start]
+ # rebin the data before storing it
+ # the idea here is, for bin_size = 15 deg, rebin the data from center points to:
+ # new_bins = [0, 15, 30, 45, 60, 75, 90, 105, 120, 135 , 150, 165, 180]
+ spin_sum_interp = scipy.interpolate.interp1d(np.arange(len(spin_sum_flux[spin_idx, :])), spin_sum_flux[spin_idx, :], fill_value='extrapolate')
+ rebinned_data[spin_idx, :] = spin_sum_interp(srx)
+
+ # we want to take the end values instead of extrapolating
+ # again, to match the functionality of congrid in IDL
+ rebinned_data[spin_idx, 0] = spin_sum_flux[spin_idx, 0]
+ rebinned_data[spin_idx, -1] = spin_sum_flux[spin_idx, -1]
+
+ current_start = spin_starts[spin_idx]+1
+
+ newname = pad_name[ii]+'_spin'
+ if len(scopes) == 6:
+ ytitle = 'rbsp-'+probe+'\nrbspice\n'+species+'\nomni'
+ else:
+ ytitle = 'rbsp-'+probe+'\nrbspice\n'+species+'\nT'+str(scopes[ii])
+
+ store_data(newname, data={'x': spin_times, 'y': rebinned_data, 'v': new_bins})
+ options(newname, 'spec', True)
+ options(newname, 'zlog', True)
+ options(newname, 'ztitle', units_label)
+ options(newname, 'ytitle', ytitle)
+ options(newname, 'yrange', [0, 180.0])
+ options(newname, 'ysubtitle', en_range_string+'\nspin-avg PAD\n(deg)')
+ out.append(newname)
+
+ #tdegap(newname, overwrite=True)
+ return out
diff --git a/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_spin_avg.py b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_spin_avg.py
new file mode 100644
index 00000000..c76e9f2e
--- /dev/null
+++ b/pyspedas/rbsp/rbspice_lib/rbsp_rbspice_spin_avg.py
@@ -0,0 +1,122 @@
+import logging
+import numpy as np
+from pytplot import get_data, store_data, options
+from pyspedas import tnames
+
+# use nanmean from bottleneck if it's installed, otherwise use the numpy one
+# bottleneck nanmean is ~2.5x faster
+try:
+ import bottleneck as bn
+ nanmean = bn.nanmean
+except ImportError:
+ nanmean = np.nanmean
+
+
+def rbsp_rbspice_spin_avg(probe='a', datatype='TOFxEH', level='l3'):
+ """
+ Calculates spin-averaged fluxes for the RBSPICE instrument
+
+ Parameters
+ ----------
+ probe : str
+ RBSP spacecraft indicator [Options: 'a' (default), 'b']
+ datatype : str
+ RBSPICE data type ['TOFxEH' (default),'TOFxEnonH']
+ level : str
+ data level ['l1','l2','l3' (default),'l3pap']
+
+ Returns
+ --------
+ Tplot variables created
+ """
+ if probe is None:
+ probe = 'a'
+ if datatype is None:
+ datatype = 'TOFxEH'
+ if level is None:
+ level = 'l3'
+ if level != 'l1':
+ units_label = '1/(cm^2-sr-s-keV)'
+ else:
+ units_label = 'counts/s'
+
+ prefix = 'rbsp'+probe+'_rbspice_'+level+'_'+datatype+'_'
+
+ spin_nums = get_data(prefix + 'Spin')
+ if spin_nums is None:
+ return
+ spin_starts = np.unique(spin_nums.y, return_index=True)[1][1:]-1
+
+ if datatype == 'TOFxEH':
+ species = 'proton'
+ elif datatype == 'TOFxEnonH':
+ species = ['helium', 'oxygen']
+ elif datatype == 'TOFxPHHHELT':
+ species = ['proton', 'oxygen']
+
+ if isinstance(species, list):
+ var_data = []
+ for spc in species:
+ var_data.extend(tnames(prefix + spc + '_T?'))
+ var_data.extend(tnames(prefix + spc + '_omni'))
+ else:
+ var_data = tnames(prefix + species + '_T?')
+ var_omni = tnames(prefix + species + '_omni')
+ var_data.extend(var_omni)
+
+    logging.info('Calculating spin-averaged energy spectra...')
+ out = []
+ zrange = None
+
+ for n in range(len(var_data)):
+ if var_data[n] == '':
+ logging.error('Error, problem finding the tplot variables to calculate the spin averages')
+ return
+ else:
+ flux_data = get_data(var_data[n])
+ if len(flux_data) < 3:
+                logging.error("Error, couldn't find energy table for the flux/cps data variable")
+ continue
+ if var_data[n][-2:-1] == 'T':
+ species = var_data[n][-9:-3]
+ elif var_data[n][-4:] == 'omni':
+ species = var_data[n][-11:-5]
+ if species == 'proton':
+ if datatype != 'TOFxPHHHELT':
+ zrange = [5., 1.e5]
+ else:
+ zrange = [2.e2, 1.e6]
+ elif species == 'helium':
+ zrange = [1., 5.e2]
+ elif species == 'oxygen':
+ if datatype != 'TOFxPHHHELT':
+ zrange = [1., 1.e2]
+ else:
+ zrange = [1e1, 1.e4]
+
+ spin_sum_flux = np.zeros((len(spin_starts), len(flux_data.v)))
+ current_start = 0
+ for spin_idx in range(len(spin_starts)):
+ spin_sum_flux[spin_idx, :] = nanmean(flux_data.y[current_start:spin_starts[spin_idx]+1, :], axis=0)
+ current_start = spin_starts[spin_idx]+1
+            sp = '_spin'
+            # var_data[n] already ends with '_omni' or a telescope tag (e.g.
+            # '_T0'), so only the '_spin' suffix is appended to the name
+            suffix = ''
+ store_data(var_data[n]+sp+suffix, data={'x': spin_nums.times[spin_starts], 'y': spin_sum_flux, 'v': flux_data.v})
+ options(var_data[n]+sp+suffix, 'ylog', True)
+ options(var_data[n]+sp+suffix, 'zlog', True)
+ options(var_data[n]+sp+suffix, 'spec', True)
+ if zrange is not None:
+ options(var_data[n]+sp+suffix, 'zrange', zrange)
+ if isinstance(species, list):
+ options(var_data[n]+sp+suffix, 'ytitle', 'rbsp'+probe+'\nrbspice\n'+datatype+'\n'+suffix)
+ else:
+ options(var_data[n]+sp+suffix, 'ytitle', 'rbsp'+probe+'\nrbspice\n'+species+'\n'+suffix)
+ options(var_data[n]+sp+suffix, 'ysubtitle', '[keV]')
+ options(var_data[n]+sp+suffix, 'ztitle', units_label)
+ out.append(var_data[n]+sp+suffix)
+ return out
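+
+
+# Note: like rbsp_rbspice_omni, this is called automatically by
+# pyspedas.rbsp.rbspice(); it relies on the 'Spin' support variable, which
+# is why the rbspice wrapper defaults to get_support_data=True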
diff --git a/pyspedas/rbsp/tests/tests.py b/pyspedas/rbsp/tests/tests.py
index 0afa00f2..e32bbe98 100644
--- a/pyspedas/rbsp/tests/tests.py
+++ b/pyspedas/rbsp/tests/tests.py
@@ -1,10 +1,10 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
from pytplot import del_data
+from pyspedas.rbsp.rbspice_lib.rbsp_rbspice_pad import rbsp_rbspice_pad
+
class LoadTestCases(unittest.TestCase):
def tearDown(self):
@@ -14,18 +14,17 @@ def test_downloadonly(self):
files = pyspedas.rbsp.efw(trange=['2015-11-3', '2015-11-4'], level='l3', downloadonly=True)
self.assertTrue(os.path.exists(files[0]))
- # temporarily disabled, 3Dec2021
- # def test_notplot(self):
- # data = pyspedas.rbsp.efw(trange=['2015-11-6', '2015-11-7'], level='l3', notplot=True)
- # self.assertTrue('density' in data.keys())
- # self.assertTrue('Vavg' in data.keys())
- # self.assertTrue('vel_gse' in data.keys())
- # self.assertTrue('efield_inertial_frame_mgse' in data.keys())
- # self.assertTrue('x' in data['density'].keys())
- # self.assertTrue('y' in data['density'].keys())
+ def test_notplot(self):
+ data = pyspedas.rbsp.efw(trange=['2015-11-6', '2015-11-7'], level='l3', notplot=True)
+ self.assertTrue('density' in data.keys())
+ self.assertTrue('efield_in_inertial_frame_spinfit_mgse' in data.keys())
+ self.assertTrue('x' in data['density'].keys())
+ self.assertTrue('y' in data['density'].keys())
def test_load_emfisis_data(self):
emfisis_vars = pyspedas.rbsp.emfisis(trange=['2018-11-5', '2018-11-6'], datatype='magnetometer', level='l3', time_clip=True)
+ self.assertTrue(data_exists('Mag'))
+ emfisis_vars = pyspedas.rbsp.emfisis(trange=['2018-11-5', '2018-11-6'], datatype='magnetometer', level='l2')
wfr_vars = pyspedas.rbsp.emfisis(trange=['2018-11-5', '2018-11-6'], level='l2', datatype='wfr')
hfr_vars = pyspedas.rbsp.emfisis(trange=['2018-11-5', '2018-11-6'], level='l2', datatype='hfr')
self.assertTrue(data_exists('Mag'))
@@ -38,14 +37,43 @@ def test_load_emfisis_data(self):
self.assertTrue(data_exists('EwSamples'))
# HFR waveform data
self.assertTrue(data_exists('HFRsamples'))
+ # L4 density
+ dens = pyspedas.rbsp.emfisis(trange=['2018-11-5', '2018-11-6'], datatype='density', level='l4')
+ self.assertTrue(data_exists('density'))
def test_load_efw_data(self):
+ efw_vars = pyspedas.rbsp.efw(trange=['2015-11-5', '2015-11-6'], level='l2')
+ self.assertTrue(data_exists('spec64_e12ac'))
efw_vars = pyspedas.rbsp.efw(trange=['2015-11-5', '2015-11-6'], level='l3')
self.assertTrue(data_exists('density'))
- # def test_load_rbspice_data(self):
- # rbspice_vars = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='tofxeh', level='l3')
- # self.assertTrue(data_exists('Alpha'))
+ def test_load_rbspice_download(self):
+ files = pyspedas.rbsp.rbspice(downloadonly=True, trange=['2018-11-5', '2018-11-6'], datatype='tofxeh', level='l3')
+ self.assertTrue(isinstance(files, list))
+
+ def test_load_rbspice_esrhelt(self):
+ rbspice_vars = pyspedas.rbsp.rbspice(trange=['2013-11-5', '2013-11-6'], datatype='ESRHELT', level='l3')
+ self.assertTrue(data_exists('rbspa_rbspice_l3_ESRHELT_FEDU'))
+
+ def test_load_rbspice_data(self):
+ data = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='TOFxEH', level='l3')
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEH_proton_omni_spin'))
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEH_proton_omni'))
+ rbsp_rbspice_pad(probe='a', datatype='TOFxEH', level='l3')
+ rbsp_rbspice_pad(probe='a', datatype='TOFxEH', level='l3', scopes=[0, 1, 2, 3])
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEH_proton_omni_0-1000keV_pad'))
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEH_proton_omni_0-1000keV_pad_spin'))
+ data = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='TOFxPHHHELT')
+ rbsp_rbspice_pad(probe='a', datatype='TOFxPHHHELT', level='l3')
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxPHHHELT_oxygen_omni_spin'))
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxPHHHELT_oxygen_omni_0-1000keV_pad_spin'))
+ data = pyspedas.rbsp.rbspice(trange=['2018-11-5', '2018-11-6'], datatype='TOFxEnonH')
+ rbsp_rbspice_pad(probe='a', datatype='TOFxEnonH', level='l3')
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEnonH_oxygen_omni_spin'))
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEnonH_oxygen_omni_0-1000keV_pad_spin'))
+ rbsp_rbspice_pad(probe='a', datatype='TOFxEH', level='l3', energy=[0, 1000.0])
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEH_proton_omni_0-1000.0keV_pad'))
+ self.assertTrue(data_exists('rbspa_rbspice_l3_TOFxEH_proton_omni_0-1000.0keV_pad_spin'))
def test_load_mageis_data(self):
mageis_vars = pyspedas.rbsp.mageis(trange=['2018-11-5', '2018-11-6'], level='l3', rel='rel04')
@@ -54,6 +82,10 @@ def test_load_mageis_data(self):
def test_load_hope_data(self):
hope_vars = pyspedas.rbsp.hope(trange=['2018-11-5', '2018-11-6'], datatype='moments', level='l3', rel='rel04')
self.assertTrue(data_exists('Ion_density'))
+ hope_vars = pyspedas.rbsp.hope(trange=['2018-11-5', '2018-11-6'], datatype='pitchangle', level='l3')
+ self.assertTrue(data_exists('FEDO'))
+ hope_vars = pyspedas.rbsp.hope(trange=['2018-11-5', '2018-11-6'], datatype='spinaverage', level='l2')
+ self.assertTrue(data_exists('I_Ele'))
def test_load_rep_data(self):
rept_vars = pyspedas.rbsp.rept(trange=['2018-11-4', '2018-11-5'], level='l2', rel='rel03')
@@ -62,10 +94,12 @@ def test_load_rep_data(self):
self.assertTrue(data_exists('FPSA'))
self.assertTrue(data_exists('FPDU'))
- # disabled, 16Sep2021 due to a problem with the data
- # def test_load_rps1min_data(self):
- # rps_vars = pyspedas.rbsp.rps()
- # self.assertTrue(data_exists('DOSE2_RATE'))
+ def test_load_rps1min_data(self):
+ rps_vars = pyspedas.rbsp.rps()
+ self.assertTrue(data_exists('DOSE2_RATE'))
+ rps_vars = pyspedas.rbsp.rps(datatype='rps')
+ self.assertTrue(data_exists('FPDU_Energy'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/secs/__init__.py b/pyspedas/secs/__init__.py
index 341df1c5..1b0abded 100644
--- a/pyspedas/secs/__init__.py
+++ b/pyspedas/secs/__init__.py
@@ -7,8 +7,8 @@
import numpy as np
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
-from pyspedas.utilities.time_double import time_double
+from pytplot import time_clip as tclip
+from pytplot import time_double
import pandas as pd
import time
import zipfile
diff --git a/pyspedas/secs/load.py b/pyspedas/secs/load.py
index 7e5c86df..e8b27b02 100644
--- a/pyspedas/secs/load.py
+++ b/pyspedas/secs/load.py
@@ -6,7 +6,7 @@
import numpy as np
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from .config import CONFIG
import zipfile
diff --git a/pyspedas/secs/makeplots.py b/pyspedas/secs/makeplots.py
index 72381fe1..0e19e8ed 100644
--- a/pyspedas/secs/makeplots.py
+++ b/pyspedas/secs/makeplots.py
@@ -170,8 +170,8 @@ def noon_midnight_meridian(dtime=None, delta=0.25):
lons_latmax = 0 + 15 * diff_in_hours # longitude for noon line
lons_latmin = lons_latmax - 180 # longitude for midnight line
elif diff_in_hours < 0:
- lons_latmax = 0 - 15 * diff_in_hours # longitude for noon line
- lons_latmin = lons_latmax + 180 # longitude for midnight line
+        lons_latmax = 0 + 15 * diff_in_hours  # longitude for noon line (sign flipped relative to the old version)
+        lons_latmin = lons_latmax - 180  # longitude for midnight line (sign flipped relative to the old version)
#
lons_max_arr = np.full((1, ni_half), lons_latmax) # for noon line
lats_max_arr = np.linspace(-90, 90, ni_half) # for noon line
diff --git a/pyspedas/secs/tests/tests.py b/pyspedas/secs/tests/tests.py
new file mode 100644
index 00000000..280c0baf
--- /dev/null
+++ b/pyspedas/secs/tests/tests.py
@@ -0,0 +1,47 @@
+import unittest
+from unittest.mock import patch
+import pyspedas
+from pyspedas.secs.makeplots import make_plots
+
+from pyspedas.secs.config import CONFIG
+CONFIG['plots_dir'] = 'dir/'
+
+
+class SECSTestCases(unittest.TestCase):
+ @patch("matplotlib.pyplot.show")
+ def test_load_secs(self, mock_show):
+ trange = ['2017-03-27', '2017-03-28']
+ d = pyspedas.secs.data(trange=trange,
+ resolution=10,
+ dtype='SECS',
+ no_download=False,
+ downloadonly=False,
+ out_type='df')
+
+ make_plots(dtype='SECS',
+ dtime='2017-03-27/06:00:00',
+ vplot_sized=True,
+ contour_den=201,
+ s_loc=False,
+ quiver_scale=30)
+
+ @patch("matplotlib.pyplot.show")
+ def test_load_eics(self, mock_show):
+ trange = ['2017-03-27', '2017-03-28']
+ d = pyspedas.secs.data(trange=trange,
+ resolution=10,
+ dtype='EICS',
+ no_download=False,
+ downloadonly=False,
+ out_type='df')
+
+ make_plots(dtype='EICS',
+ dtime='2017-03-27/06:00:00',
+ vplot_sized=True,
+ contour_den=201,
+ s_loc=False,
+ quiver_scale=30)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/soho/README.md b/pyspedas/soho/README.md
new file mode 100644
index 00000000..1d6dcd52
--- /dev/null
+++ b/pyspedas/soho/README.md
@@ -0,0 +1,55 @@
+
+## Solar & Heliospheric Observatory (SOHO)
+The routines in this module can be used to load data from the Solar & Heliospheric Observatory (SOHO) mission.
+
+### Instruments
+- Charge, Element, and Isotope Analysis System (CELIAS)
+- Comprehensive Suprathermal and Energetic Particle Analyzer (COSTEP)
+- Energetic and Relativistic Nuclei and Electron experiment (ERNE)
+- Orbit (ephemeris and attitude) data (ORBIT)
+
+### Examples
+Get started by importing pyspedas and tplot; these are required to load and plot the data:
+
+```python
+import pyspedas
+from pytplot import tplot
+```
+
+#### Charge, Element, and Isotope Analysis System (CELIAS)
+
+```python
+celias_vars = pyspedas.soho.celias(trange=['2006-06-01', '2006-06-02'])
+
+tplot(['V_p', 'N_p'])
+```
+
+
+#### Comprehensive Suprathermal and Energetic Particle Analyzer (COSTEP)
+
+```python
+costep_vars = pyspedas.soho.costep(trange=['2006-06-01', '2006-06-02'])
+
+tplot(['P_int', 'He_int'])
+```
+
+
+#### Energetic and Relativistic Nuclei and Electron experiment (ERNE)
+
+```python
+erne_vars = pyspedas.soho.erne(trange=['2006-06-01', '2006-06-02'])
+
+tplot('PH')
+```
+
+
+#### Orbit (ephemeris and attitude) data (ORBIT)
+
+```python
+orbit_vars = pyspedas.soho.orbit(trange=['2006-06-01', '2006-06-02'])
+
+tplot(['GSE_POS', 'GSE_VEL'])
+```
+
+
+
\ No newline at end of file
diff --git a/pyspedas/soho/__init__.py b/pyspedas/soho/__init__.py
new file mode 100644
index 00000000..990b7db3
--- /dev/null
+++ b/pyspedas/soho/__init__.py
@@ -0,0 +1,310 @@
+from .load import load
+
+
+def celias(trange=['2006-06-01', '2006-06-02'],
+ datatype='pm_5min',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Charge, Element, and Isotope Analysis System (CELIAS)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'pm_5min' for L1 data
+ 'pm_30s' for L1 data
+ 'sem_1day' for L2 data
+ 'sem_15s' for L2 data
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+    tvars = load(instrument='celias', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return celias_postprocessing(tvars)
+
+
+def celias_postprocessing(variables):
+ """
+ Placeholder for CELIAS post-processing
+ """
+ return variables
+
+
+def costep(trange=['2006-06-01', '2006-06-02'],
+ datatype='ephin_l3i-1day',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Comprehensive Suprathermal and Energetic Particle Analyzer (COSTEP)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; options: (default: ephin_l3i-1day)
+ ephin_l3i-1day
+ ephin_l3i-1hr
+ ephin_l3i-30min
+ ephin_l3i-10min
+ ephin_l3i-5min
+            ephin_l3i-1min
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+    tvars = load(instrument='costep', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return costep_postprocessing(tvars)
+
+
+def costep_postprocessing(variables):
+ """
+ Placeholder for COSTEP post-processing
+ """
+ return variables
+
+
+def erne(trange=['2006-06-01', '2006-06-02'],
+ datatype='hed_l2-1min',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Energetic and Relativistic Nuclei and Electron experiment (ERNE)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+        ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'hed_l2-1min' for L2 data
+ 'led_l2-1min' for L2 data
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
+
+ """
+    tvars = load(instrument='erne', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return erne_postprocessing(tvars)
+
+
+def erne_postprocessing(variables):
+ """
+ Placeholder for ERNE post-processing
+ """
+ return variables
+
+
+def orbit(trange=['2006-06-01', '2006-06-02'],
+ datatype='pre_or',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Orbit (ephemeris and attitude) data (ORBIT)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ datatype: str
+ Data type; Valid options:
+ 'def_at' for definitive attitude data
+ 'def_or' for definitive orbit data
+ 'pre_or' for preliminary orbit data (default)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
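+
+ Example
+ ----------
+ # A minimal usage sketch; the variable names mirror this PR's ORBIT test:
+ import pyspedas
+ from pytplot import tplot
+ orbit_vars = pyspedas.soho.orbit(trange=['2006-06-01', '2006-06-02'])
+ tplot(['GSE_POS', 'GSE_VEL'])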
+
+ """
+ tvars = load(instrument='orbit', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return orbit_postprocessing(tvars)
+
+
+def orbit_postprocessing(variables):
+ """
+ Placeholder for ORBIT post-processing
+ """
+ return variables
+
+
+
diff --git a/pyspedas/soho/config.py b/pyspedas/soho/config.py
new file mode 100644
index 00000000..64dafd5f
--- /dev/null
+++ b/pyspedas/soho/config.py
@@ -0,0 +1,12 @@
+import os
+
+CONFIG = {'local_data_dir': 'soho_data/',
+ 'remote_data_dir': 'https://spdf.gsfc.nasa.gov/pub/data/soho/'}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'soho'])
+
+if os.environ.get('SOHO_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.environ['SOHO_DATA_DIR']
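+
+# For example: with SPEDAS_DATA_DIR=/data/spedas set before importing pyspedas,
+# SOHO files are stored under /data/spedas/soho; SOHO_DATA_DIR, checked last,
+# takes precedence for this mission. (The path is illustrative only.)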
+
\ No newline at end of file
diff --git a/pyspedas/soho/load.py b/pyspedas/soho/load.py
new file mode 100644
index 00000000..6bd26112
--- /dev/null
+++ b/pyspedas/soho/load.py
@@ -0,0 +1,73 @@
+import logging
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+
+def load(trange=['2006-06-01', '2006-06-02'],
+ instrument='celias',
+ datatype='pm_5min',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the SOHO mission; this function is not meant
+ to be called directly; instead, see the wrappers:
+
+ pyspedas.soho.celias
+ pyspedas.soho.costep
+ pyspedas.soho.erne
+ pyspedas.soho.orbit
+
+ """
+ res = 24 * 3600.
+
+ if instrument == 'celias':
+ pathformat = instrument+'/'+datatype+'/%Y/soho_'+instrument+'-'+datatype+'_%Y%m%d_v??.cdf'
+ elif instrument == 'costep':
+ pathformat = instrument+'/'+datatype+'/%Y/soho_'+instrument+'-'+datatype+'_%Y0101_v??.??.cdf'
+ res = 24 * 3600. * 366
+ elif instrument == 'erne':
+ pathformat = instrument+'/'+datatype+'/%Y/soho_'+instrument+'-'+datatype+'_%Y%m%d_v??.cdf'
+ elif instrument == 'orbit':
+ if datatype not in ['pre_or', 'def_or', 'def_at']:
+ logging.error('Invalid datatype: ' + datatype)
+ return
+ datatype_fn = datatype.split('_')[1] + '_' + datatype.split('_')[0]
+ pathformat = instrument+'/'+datatype+'/cdf/%Y/so_'+datatype_fn+'_%Y%m%d_v??.cdf'
+
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange, res=res)
+
+ out_files = []
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly:
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
+
+ if notplot:
+ return tvars
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
+
+
\ No newline at end of file
diff --git a/pyspedas/soho/tests/__init__.py b/pyspedas/soho/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/soho/tests/tests.py b/pyspedas/soho/tests/tests.py
new file mode 100644
index 00000000..f51231a5
--- /dev/null
+++ b/pyspedas/soho/tests/tests.py
@@ -0,0 +1,39 @@
+import os
+import unittest
+from pytplot import data_exists
+import pyspedas
+
+
+class LoadTestCases(unittest.TestCase):
+ def test_load_celias_data(self):
+ out_vars = pyspedas.soho.celias(time_clip=True)
+ self.assertTrue(data_exists('V_p'))
+ self.assertTrue(data_exists('N_p'))
+
+ def test_load_costep_data(self):
+ out_vars = pyspedas.soho.costep(time_clip=True)
+ self.assertTrue(data_exists('P_int'))
+ self.assertTrue(data_exists('He_int'))
+
+ def test_load_erne_data(self):
+ out_vars = pyspedas.soho.erne(time_clip=True)
+ self.assertTrue(data_exists('PH'))
+
+ def test_load_orbit_data(self):
+ out_vars = pyspedas.soho.orbit(time_clip=True)
+ self.assertTrue(data_exists('GSE_POS'))
+ self.assertTrue(data_exists('GSE_VEL'))
+
+ def test_load_notplot(self):
+ out_vars = pyspedas.soho.erne(notplot=True)
+ self.assertTrue('PH' in out_vars)
+
+ def test_downloadonly(self):
+ files = pyspedas.soho.erne(downloadonly=True, trange=['2006-06-01', '2006-06-02'])
+ self.assertTrue(os.path.exists(files[0]))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+
\ No newline at end of file
diff --git a/pyspedas/solo/__init__.py b/pyspedas/solo/__init__.py
index 829ed2a1..2af608a2 100644
--- a/pyspedas/solo/__init__.py
+++ b/pyspedas/solo/__init__.py
@@ -1,6 +1,7 @@
-
from pytplot import options
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def mag(trange=['2020-06-01', '2020-06-02'],
datatype='rtn-normal',
@@ -85,15 +86,14 @@ def mag(trange=['2020-06-01', '2020-06-02'],
if 'B_SRF'+suffix in mag_vars:
options('B_SRF'+suffix, 'legend_names', ['Bx (SRF)', 'By (SRF)', 'Bz (SRF)'])
options('B_SRF'+suffix, 'ytitle', ytitle)
- options('B_SRF'+suffix, 'color', ['b', 'g', 'r'])
if 'B_RTN'+suffix in mag_vars:
options('B_RTN'+suffix, 'legend_names', ['Br (RTN)', 'Bt (RTN)', 'Bn (RTN)'])
options('B_RTN'+suffix, 'ytitle', ytitle)
- options('B_RTN'+suffix, 'color', ['b', 'g', 'r'])
return mag_vars
+
def rpw(trange=['2020-06-15', '2020-06-16'],
datatype='hfr-surv',
level='l2',
@@ -194,6 +194,7 @@ def rpw(trange=['2020-06-15', '2020-06-16'],
"""
return load(instrument='rpw', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
def swa(trange=['2020-07-22', '2020-07-23'],
datatype='pas-eflux',
level='l2',
@@ -268,6 +269,7 @@ def swa(trange=['2020-07-22', '2020-07-23'],
return loaded_vars
+
def epd(trange=['2020-06-14', '2020-06-15'],
datatype='step',
mode='hcad',
@@ -336,3 +338,6 @@ def epd(trange=['2020-06-14', '2020-06-15'],
"""
return load(instrument='epd', trange=trange, level=level, datatype=datatype, mode=mode, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Solar Orbiter', instrument=instrument, label=label)
diff --git a/pyspedas/solo/load.py b/pyspedas/solo/load.py
index 7d4dec25..d39aa6e1 100644
--- a/pyspedas/solo/load.py
+++ b/pyspedas/solo/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
@@ -42,7 +42,10 @@ def load(trange=['2020-06-01', '2020-06-02'],
res = 60.0
if instrument == 'mag':
- pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
+ if level == 'll02':
+ pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/%Y/solo_'+level+'_'+instrument+'_'+date_format+'_v'+cdf_version+'.cdf'
+ else:
+ pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
elif instrument == 'epd':
pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/'+mode+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'-'+mode+'_'+date_format+'_v'+cdf_version+'.cdf'
elif instrument == 'rpw':
@@ -62,6 +65,8 @@ def load(trange=['2020-06-01', '2020-06-02'],
date_format = '%Y%m%dt%H%M??-*'
res = 60.0
pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
+ elif level == 'l3':
+ pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
# find the full remote path names using the trange
remote_names = dailynames(file_format=pathformat, trange=trange, res=res)
diff --git a/pyspedas/solo/tests/tests.py b/pyspedas/solo/tests/tests.py
index 218dabba..8c06942f 100644
--- a/pyspedas/solo/tests/tests.py
+++ b/pyspedas/solo/tests/tests.py
@@ -1,14 +1,22 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_load_mag_data(self):
mag_vars = pyspedas.solo.mag(time_clip=True)
self.assertTrue(data_exists('B_RTN'))
+ mag_vars = pyspedas.solo.mag(datatype='rtn-normal-1-minute')
+ self.assertTrue(data_exists('B_RTN'))
+ mag_vars = pyspedas.solo.mag(notplot=True, datatype='rtn-burst')
+ self.assertTrue('B_RTN' in mag_vars)
+
+ def test_load_mag_ll02_data(self):
+ mag_vars = pyspedas.solo.mag(level='ll02', trange=['2020-08-04', '2020-08-05'])
+ self.assertTrue(data_exists('B_RTN'))
+ self.assertTrue(data_exists('B_SRF'))
def test_load_epd_data(self):
epd_vars = pyspedas.solo.epd()
@@ -21,16 +29,28 @@ def test_load_rpw_data(self):
rpw_vars = pyspedas.solo.rpw()
self.assertTrue(data_exists('AVERAGE_NR'))
self.assertTrue(data_exists('TEMPERATURE'))
- self.assertTrue(data_exists('FLUX_DENSITY1'))
- self.assertTrue(data_exists('FLUX_DENSITY2'))
+ # self.assertTrue(data_exists('FLUX_DENSITY1'))
+ # self.assertTrue(data_exists('FLUX_DENSITY2'))
def test_load_swa_data(self):
swa_vars = pyspedas.solo.swa()
self.assertTrue(data_exists('eflux'))
+ swa_vars = pyspedas.solo.swa(level='l2', datatype='eas1-nm3d-def')
+ self.assertTrue(data_exists('SWA_EAS1_NM3D_DEF_Data'))
+ swa_vars = pyspedas.solo.swa(notplot=True)
+ self.assertTrue('eflux' in swa_vars)
+
+ def test_load_swa_l1_data(self):
+ swa_vars = pyspedas.solo.swa(level='l1', datatype='eas-padc')
+ self.assertTrue(data_exists('SWA_EAS_BM_Data'))
+ self.assertTrue(data_exists('SWA_EAS_MagDataUsed'))
+ swa_vars = pyspedas.solo.swa(level='l1', datatype='his-pha', trange=['2020-06-03', '2020-06-04'])
+ self.assertTrue(data_exists('HIS_PHA_EOQ_STEP'))
def test_downloadonly(self):
files = pyspedas.solo.mag(downloadonly=True)
self.assertTrue(os.path.exists(files[0]))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/sosmag/README.md b/pyspedas/sosmag/README.md
new file mode 100644
index 00000000..cf906f4d
--- /dev/null
+++ b/pyspedas/sosmag/README.md
@@ -0,0 +1,47 @@
+## Service Oriented Spacecraft Magnetometer on GEO-KOMPSAT-2A (SOSMAG)
+The routines in this module can be used to load data from the SOSMAG magnetometer.
+
+For more information, see:
+- [SOSMAG at the ESA Space Weather Service Network](https://swe.ssa.esa.int/sosmag)
+
+The data is loaded using the ESA HAPI server (requires registration):
+- [ESA HAPI server](https://swe.ssa.esa.int/hapi)
+
+Users should register with ESA and then use their own username and password in the file sosmag/load.py
+- [ESA registration](https://swe.ssa.esa.int/registration/)
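+
+As an alternative to editing the file, the credentials can be set at runtime (a sketch mirroring this PR's tests; `sosmag_parameters` is the module-level dict in sosmag/load.py):
+
+```python
+import pyspedas
+
+pyspedas.sosmag.load.sosmag_parameters['username'] = 'my_esa_username'
+pyspedas.sosmag.load.sosmag_parameters['password'] = 'my_esa_password'
+```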
+
+
+### Instruments
+- Magnetometer (MAG)
+
+Magnetic Field Data (1-16 Hz) from SOSMAG on GEO-KOMPSAT-2A, in geostationary orbit at 128.2E.
+
+
+### Data types
+There are two datatypes available:
+
+- Near-realtime ('1m')
+
+```python
+tplot_ok, var_names = sosmag_load(trange=['2021-01-01 02:00:00', '2021-01-01 03:00:00'], datatype='1m')
+```
+
+- Recalibrated L2 ('', the default)
+
+```python
+tplot_ok, var_names = sosmag_load(trange=['2021-01-01 02:00:00', '2021-01-01 03:00:00'], datatype='')
+```
+
+
+### Example
+First import the required functions from pyspedas and tplot.
+
+Then load and plot the magnetometer data.
+
+```python
+from pytplot import tplot
+from pyspedas import sosmag_load
+
+tplot_ok, var_names = sosmag_load(trange=['2021-01-01 02:00:00', '2021-01-01 03:00:00'], datatype='1m')
+tplot(var_names)
+```
diff --git a/pyspedas/sosmag/__init__.py b/pyspedas/sosmag/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/sosmag/load.py b/pyspedas/sosmag/load.py
new file mode 100644
index 00000000..d4d39a2a
--- /dev/null
+++ b/pyspedas/sosmag/load.py
@@ -0,0 +1,535 @@
+'''
+Load data from SOSMAG Space Weather instrument
+ flying on the Korean GEO-KOMPSAT-2A satellite.
+
+This function downloads data directly from the ESA HAPI server.
+The ESA HAPI server requires authentication.
+Users have to register with the ESA server
+ and replace their username and password in the code below.
+
+Notes
+-----
+https://swe.ssa.esa.int/sosmag
+https://swe.ssa.esa.int/hapi
+Data types available:
+ 1. (datatype='1m', esa_gk2a_sosmag_1m)
+ Near-realtime Magnetic Field Data (1-16 Hz) from SOSMAG
+ on GEO-KOMPSAT-2A in geostationary orbit at 128.2E.
+ 'spase://SSA/NumericalData/GEO-KOMPSAT-2A/esa_gk2a_sosmag_1m'
+
+ 2. (default, datatype='', esa_gk2a_sosmag_recalib)
+ Recalibrated L2 Magnetic Field Data (1-16 Hz) from SOSMAG
+ on GEO-KOMPSAT-2A in geostationary orbit at 128.2E.
+ 'spase://SSA/NumericalData/GEO-KOMPSAT-2A/esa_gk2a_sosmag_recalib'
+
+Example
+-------
+from pytplot import tplot
+from pyspedas import sosmag_load
+t_ok, var_names = sosmag_load(trange=['2021-01-01 02:00','2021-01-01 03:00'])
+tplot(var_names)
+
+'''
+import requests
+import json
+import numpy as np
+from pytplot import store_data, options
+from pyspedas import time_double, time_string
+
+
+# Global parameters for the functions in this file.
+# Users must replace username and password with their ESA credentials.
+sosmag_parameters = {
+ 'username': 'spedas', # Users must replace with their ESA username.
+ 'password': 'acQ4pG6u9Bh26v2', # Users must replace with their ESA password.
+ 'portal_url': 'https://swe.ssa.esa.int/',
+ 'authenticate_url': 'https://sso.ssa.esa.int/am/json/authenticate',
+ 'sso_cookiename': 'iPlanetDirectoryPro',
+ 'print_errors': True, # If true, prints error messages.
+ 'print_messages': False, # If true, prints debugging/success messages.
+}
+
+
+def sosmag_get_auth_cookie():
+ '''
+ Authenticates a user against OpenAM.
+
+ Uses the sosmag_parameters dictionary for the parameters needed.
+ Returns whether authentication was successful and the obtained cookie.
+ If an error occurs, the exception is caught and the error printed.
+
+ Parameters
+ ----------
+ None.
+
+ Returns
+ -------
+ success: bool
+ True, if authentication was successful
+ auth_cookie: str
+ The obtained authentication cookie
+ '''
+ success = False
+ auth_cookie = ''
+ try:
+ # Send a POST request to the authentication url
+ response = requests.post(
+ sosmag_parameters['authenticate_url'],
+ headers={
+ 'Content-Type': 'application/json',
+ 'X-OpenAM-Username': sosmag_parameters['username'],
+ 'X-OpenAM-Password': sosmag_parameters['password'],
+ },
+ data='{}')
+ # From the response, extract the auth cookie and return it
+ token_dict = json.loads(response.content)
+ auth_cookie = token_dict['tokenId']
+ success = True
+ except Exception as exc:
+ success = False
+ auth_cookie = ''
+ if sosmag_parameters['print_errors']:
+ print(exc)
+ finally:
+ return success, auth_cookie
+
+
+def sosmag_get_session(auth_cookie):
+ '''
+ Establishes a session with the ESA HAPI server.
+
+ Uses the authentication cookie obtained from sosmag_get_auth_cookie().
+ Returns whether a session was established successfully and if so,
+ the obtained session cookies.
+ If an error occurs, the exception is caught and the error is printed.
+
+ Parameters
+ ----------
+ auth_cookie: str
+ The obtained authentication cookie from sosmag_get_auth_cookie()
+
+ Returns
+ -------
+ success: bool
+ True, if the session was established successfully
+ jsession_id: str
+ The obtained session cookie
+ xsrf_token: str
+ The obtained xsrf token
+ '''
+ success = False
+ jsession_id = ''
+ xsrf_token = ''
+ try:
+ # Try to access the HAPI/capabilities using the auth_cookie
+ init_response = requests.get(
+ sosmag_parameters['portal_url'] + '/hapi/capabilities',
+ cookies={
+ sosmag_parameters['sso_cookiename']: auth_cookie,
+ }
+ )
+ # Extract the session cookies from the very first response from HAPI
+ cookie_jar = init_response.history[0].cookies
+ jsession_id = cookie_jar.get('JSESSIONID')
+ xsrf_token = cookie_jar.get('XSRF-TOKEN')
+ # Extract the consent url we are being requested to send our consent to
+ # (in case we didn't consent yet)
+ consent_url = init_response.url
+ content = init_response.content
+
+ # If we consented already, we should have received HAPI/capabilities.
+ if '/hapi/capabilities' not in consent_url:
+ # If not, we need to give our consent in the next step.
+ # Send the consent along with all cookies to the consent url.
+ consent_response = requests.post(
+ consent_url,
+ cookies={
+ sosmag_parameters['sso_cookiename']: auth_cookie,
+ 'JSESSIONID': jsession_id,
+ 'XSRF_TOKEN': xsrf_token,
+ },
+ data={
+ 'decision': 'Allow',
+ 'save_consent': 'on',
+ }
+ )
+ content = consent_response.content
+ # This will result in a redirect to the initial HAPI/capabilities.
+ capabilities = json.loads(content)
+ # The json output should be:
+ # 2022/10/24: {'HAPI':'2.1.0','status':{'code':1200,'message':'OK'},'outputFormats':['csv','json']}
+ # previous: {'version':'2.1.0','status':{'code':1200,'message':'OK'},'outputFormats':['csv','json']}
+ version = capabilities['HAPI']
+ status = capabilities['status']
+ # If the output is what we expect, return True and the session cookies
+ if version != '' and status != {} and status['message'] == 'OK':
+ success = True
+ else:
+ success = False
+ jsession_id = ''
+ xsrf_token = ''
+ except Exception as exc:
+ if sosmag_parameters['print_errors']:
+ print(exc)
+ success = False
+ jsession_id = ''
+ xsrf_token = ''
+
+ return success, jsession_id, xsrf_token
+
+
+def sosmag_get_capabilities(jsession_id, xsrf_token):
+ '''
+ Gets HAPI/capabilities from server.
+
+ Uses the two cookies obtained using sosmag_get_session().
+ If an error occurs, the exception is caught and the error printed.
+
+ Parameters
+ ----------
+ jsession_id: str
+ The obtained session cookie
+ xsrf_token: str
+ The obtained xsrf token
+
+ Returns
+ -------
+ success: bool
+ True, if function was successful
+ capabilities: dict of str
+ The obtained capabilities as a json dictionary of strings
+ '''
+ success = False
+ capabilities = {}
+ try:
+ # Send a GET request to the HAPI/capabilities.
+ test_response = requests.get(
+ sosmag_parameters['portal_url'] + '/hapi/capabilities',
+ cookies={
+ 'JSESSIONID': jsession_id,
+ 'XSRF_TOKEN': xsrf_token,
+ }
+ )
+ # Extract the capabilities from the response.
+ capabilities = json.loads(test_response.content)
+ version = capabilities['HAPI']
+ status = capabilities['status']
+ # If the capabilities are as expected, return True.
+ if version != '' and status != {} and status['message'] == 'OK':
+ success = True
+ else:
+ success = False
+ capabilities = {}
+ except Exception as exc:
+ if sosmag_parameters['print_errors']:
+ print(exc)
+ success = False
+ capabilities = {}
+
+ return success, capabilities
+
+
+def sosmag_get_data(
+ jsession_id, xsrf_token, # HAPI session cookies.
+ datatype='', # Data type, either '' (recalibrated) or '1m' (real time)
+ timemin='2022-01-31T01:00:00.000Z', # Start time
+ timemax='2022-01-31T01:10:00.000Z', # End time
+):
+ '''
+ Gets either HAPI/data or HAPI/info from server.
+
+ Uses the two cookies obtained from sosmag_get_session().
+ If an error occurs, the exception is caught and the error printed.
+
+ Parameters
+ ----------
+ jsession_id: str
+ The obtained session cookie.
+ xsrf_token: str
+ The obtained xsrf token cookie.
+ datatype: str
+ Data type, either '' (recalibrated) or '1m' (1 minute, real time).
+ timemin: str
+ Start time, for example: '2022-01-31T01:00:00.000Z'.
+ timemax: str
+ End time, for example: '2022-01-31T01:10:00.000Z'.
+
+ Returns
+ -------
+ success: bool
+ True, if function was successful.
+ data: dict of str
+ The obtained data as a json dictionary of strings.
+ parameters: dict of str
+ The obtained parameters for the data as a json dictionary of strings.
+ description: str
+ The obtained description of data.
+ '''
+ success = False
+ data = {}
+ parameters = {}
+ description = ''
+ dataid = 'spase://SSA/NumericalData/GEO-KOMPSAT-2A/'
+ if datatype == '1m':
+ dataid = dataid + 'esa_gk2a_sosmag_1m'
+ else:
+ dataid = dataid + 'esa_gk2a_sosmag_recalib'
+
+ # This query string can also be used in a browser.
+ hquery = ('data?id=' + dataid +
+ '&time.min=' + timemin +
+ '&time.max=' + timemax +
+ '&format=json')
+ fullurl = sosmag_parameters['portal_url'] + '/hapi/' + hquery
+ if sosmag_parameters['print_messages']:
+ print(fullurl)
+
+ try:
+ # send a GET request to the HAPI server
+ test_response = requests.get(
+ fullurl,
+ cookies={
+ 'JSESSIONID': jsession_id,
+ 'XSRF_TOKEN': xsrf_token,
+ }
+ )
+ # extract the content from the response
+ dat = json.loads(test_response.content)
+ data = dat['data']
+ parameters = dat['parameters']
+ description = dat['description']
+ status = dat['status']
+
+ # if the status is as expected, return True along with the data dict
+ if data != '' and status != {} and status['message'] == 'OK':
+ success = True
+ else:
+ success = False
+ data = {}
+ parameters = {}
+ description = ''
+ except Exception as exc:
+ if sosmag_parameters['print_errors']:
+ print(exc)
+ success = False
+ data = {}
+ parameters = {}
+ description = ''
+
+ return success, data, parameters, description
+
+
+def sosmag_to_tplot(
+ data, # Data, array with 14 fields
+ parameters, # Description of data, array with 14 fields
+ desc, # Single string description of the data set
+ datatype='', # Either '' (recalibrated) or '1m' (real time)
+ prefix='', # To be added as prefix of tplot variable
+ suffix='' # To be added as suffix of tplot variable
+):
+ '''
+ Saves SOSMAG data as tplot variables.
+
+ Creates three tplot variables:
+ 1. Magnetic Field B in GSE coordinates, sosmag_b_gse.
+ 2. Magnetic Field B in HPEN coordinates, sosmag_b_hpen.
+ 3. Spacecraft Position in GSE, sosmag_position.
+
+ Parameters
+ ----------
+ data: dict of str
+ The obtained data as a json dictionary of strings.
+ parameters: dict of str
+ The obtained parameters for the data as a json dictionary of strings.
+ description: str
+ The obtained description for this set of data.
+ datatype: str
+ Data type can be either '' (recalibrated) or '1m' (1 minute real time).
+ prefix: str
+ Prefix for tplot names. Default is ''.
+ suffix: str
+ Suffix for tplot names. Default is ''.
+
+ Returns
+ -------
+ success: bool
+ True, if function was successful.
+ var_names: list of str
+ Names of tplot variables created.
+ '''
+ success = False
+ var_names = []
+ try:
+ # Construct tplot variable names.
+ if datatype == '':
+ pre = prefix + 'sosmag'
+ else:
+ pre = prefix + 'sosmag_' + datatype
+
+ # Get data
+ d = np.array(data) # data
+ p = np.array(parameters) # parameters
+
+ if len(d.shape) != 2 or len(p.shape) != 1 or d.shape[1] != 14:
+ print('SOSMAG data has wrong shape. Abort.')
+ return success, var_names
+
+ # Time is the 0th field
+ td = np.array(time_double(d[:, 0]))
+
+ # Magnetic field in GSE 'b_gse_x', 'b_gse_y', 'b_gse_z'
+ # Data fields: [2, 3, 4]
+ yd = np.array(d[:, 2:5], dtype=float)
+ var_name0 = pre + '_b_gse' + suffix
+ pnames = [p[2]['name'], p[3]['name'], p[4]['name']]
+ pd = 'Magnetic Field B in GSE coordinates'
+ attr_dict = {'description': pd}
+ store_data(var_name0, data={'x': td, 'y': yd}, attr_dict=attr_dict)
+ options(var_name0, 'legend_names', pnames)
+ options(var_name0, 'ysubtitle', '[nT]')
+ options(var_name0, 'coord_sys', 'gse')
+ options(var_name0, 'description', desc)
+
+ # Magnetic field in HPEN 'b_hpen_x', 'b_hpen_y', 'b_hpen_z'
+ # Data fields: [5, 6, 7]
+ yd = np.array(d[:, 5:8], dtype=float)
+ var_name1 = pre + '_b_hpen' + suffix
+ pnames = [p[5]['name'], p[6]['name'], p[7]['name']]
+ pd = 'Magnetic Field B in HPEN coordinates'
+ attr_dict = {'description': pd}
+ store_data(var_name1, data={'x': td, 'y': yd}, attr_dict=attr_dict)
+ options(var_name1, 'legend_names', pnames)
+ options(var_name1, 'ysubtitle', '[nT]')
+ options(var_name1, 'coord_sys', 'hpen')
+ options(var_name1, 'description', desc)
+
+ # Spacecraft Position in GSE 'position_x', 'position_y', 'position_z'
+ # Data fields: [8, 9, 10]
+ yd = np.array(d[:, 8:11], dtype=float)
+ var_name2 = pre + '_position' + suffix
+ pnames = [p[8]['name'], p[9]['name'], p[10]['name']]
+ pd = 'Spacecraft Position in GSE'
+ attr_dict = {'description': pd}
+ store_data(var_name2, data={'x': td, 'y': yd}, attr_dict=attr_dict)
+ options(var_name2, 'legend_names', pnames)
+ options(var_name2, 'ysubtitle', '[km]')
+ options(var_name2, 'coord_sys', 'gse')
+ options(var_name2, 'description', desc)
+
+ # Print the variable names created
+ var_names = [var_name0, var_name1, var_name2]
+ print('Tplot variables created!')
+ print(var_names)
+ success = True
+ except Exception as exc:
+ if sosmag_parameters['print_errors']:
+ print(exc)
+ success = False
+
+ return success, var_names
+
+
+def sosmag_load(
+ trange=['2022-01-31T01:00:00.000Z',
+ '2022-01-31T01:01:00.000Z'], # Default: one minute of data
+ datatype='', # Either '' (recalibrated) or '1m' (real time)
+ prefix='', # Prefix for tplot names
+ suffix='', # Suffix for tplot names
+):
+ '''
+ This function loads data from SOSMAG.
+
+ Gets data from the ESA web server as a json string.
+ Creates three tplot variables:
+ 1. Magnetic Field B in GSE coordinates, sosmag_b_gse
+ 2. Magnetic Field B in HPEN coordinates, sosmag_b_hpen
+ 3. Spacecraft Position in GSE, sosmag_position
+
+ Parameters
+ ----------
+ trange: list of str
+ Start and end times.
+ Many time formats are supported, see function time_double().
+ datatype: str
+ Either '' (recalibrated) or '1m' (real time).
+ prefix: str
+ Prefix for tplot names. Default is ''.
+ suffix: str
+ Suffix for tplot names. Default is ''.
+
+ Returns
+ -------
+ success: bool
+ True, if function was successful.
+ var_names: list of str
+ Names of tplot variables created.
+
+ Notes
+ -----
+ Link that can be tested in a browser:
+ https://swe.ssa.esa.int/hapi/data?
+ id=spase://SSA/NumericalData/GEO-KOMPSAT-2A/esa_gk2a_sosmag_recalib
+ &time.min=2021-01-31T01:00:00.000Z
+ &time.max=2021-01-31T01:01:00.000Z&format=json
+ '''
+ success = False
+ var_names = []
+ # Make sure that time is in the correct format for the ESA server.
+ trange = time_string(time_double(trange), fmt='%Y-%m-%dT%H:%M:%S.000Z')
+ if sosmag_parameters['print_messages']:
+ print('trange', trange)
+
+ # Authenticate with the HAPI server and receive a cookie.
+ authenticated, auth_cookie = sosmag_get_auth_cookie()
+ if authenticated:
+ if sosmag_parameters['print_messages']:
+ print('SOSMAG authentication successful.')
+ else:
+ print('SOSMAG authentication failed. '
+ 'Please check username and password in the file sosmag/load.py.')
+ return
+
+ # Set a session with the server.
+ capok = False
+ if authenticated:
+ session_ok, jsession_id, xsrf_token = sosmag_get_session(auth_cookie)
+ if session_ok:
+ # Session successfully established. Obtained session cookies.
+ # Testing session cookies on hapi/capabilities.
+ if sosmag_parameters['print_messages']:
+ print('SOSMAG session established successfully.')
+ capok, capabilities = sosmag_get_capabilities(jsession_id,
+ xsrf_token)
+ if capok:
+ if sosmag_parameters['print_messages']:
+ print('SOSMAG HAPI server capabilities: ')
+ print(capabilities)
+ else:
+ print('Problem communicating with server. Aborting.')
+ return
+ else:
+ print('SOSMAG session could not be established. Aborting.')
+ return
+
+ # Get data.
+ dataok = False
+ if capok:
+ dataok, data, parameters, desc = sosmag_get_data(
+ jsession_id, xsrf_token, datatype=datatype,
+ timemin=trange[0], timemax=trange[1])
+ if dataok:
+ tplotok, var_names = sosmag_to_tplot(
+ data, parameters, desc, datatype=datatype,
+ prefix=prefix, suffix=suffix)
+ if tplotok:
+ if sosmag_parameters['print_messages']:
+ print('Data was loaded.')
+ success = True
+ else:
+ print('Could not load data into pytplot. Aborting.')
+ return
+ else:
+ print('Could not get any data. Aborting.')
+ return
+
+ return success, var_names
diff --git a/pyspedas/sosmag/tests/__init__.py b/pyspedas/sosmag/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/sosmag/tests/tests.py b/pyspedas/sosmag/tests/tests.py
new file mode 100644
index 00000000..04b5e140
--- /dev/null
+++ b/pyspedas/sosmag/tests/tests.py
@@ -0,0 +1,39 @@
+import unittest
+import pyspedas
+from pyspedas import sosmag_load
+from pytplot import data_exists
+
+pyspedas.sosmag.load.sosmag_parameters['print_messages'] = True
+
+
+class SOSMAG_Tests(unittest.TestCase):
+ def test_mag(self):
+ t_ok, var_names = sosmag_load(trange=['2021-01-01 02:00', '2021-01-01 03:00'])
+ self.assertTrue(data_exists('sosmag_b_gse'))
+ self.assertTrue(data_exists('sosmag_position'))
+
+ def test_1m(self):
+ t_ok, var_names = sosmag_load(datatype='1m', trange=['2021-01-01 02:00', '2021-01-01 03:00'])
+ self.assertTrue(data_exists('sosmag_1m_b_gse'))
+ self.assertTrue(data_exists('sosmag_1m_position'))
+
+ def test_invalid_user(self):
+ pyspedas.sosmag.load.sosmag_parameters['username'] = 'not_valid'
+ try:
+ t_ok, var_names = sosmag_load(datatype='1m', trange=['2021-01-01 02:00', '2021-01-01 03:00'])
+ except Exception:
+ pass
+ pyspedas.sosmag.load.sosmag_parameters['username'] = 'spedas'
+
+ def test_invalid_cookies(self):
+ authenticated, auth_cookie = pyspedas.sosmag.load.sosmag_get_auth_cookie()
+ # shouldn't work
+ invalid = pyspedas.sosmag.load.sosmag_get_session('')
+ invalid = pyspedas.sosmag.load.sosmag_get_capabilities('', '')
+ invalid = pyspedas.sosmag.load.sosmag_get_data('', '')
+ invalid = pyspedas.sosmag.load.sosmag_to_tplot([1], '', '')
+ invalid = pyspedas.sosmag.load.sosmag_to_tplot(None, '', '')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/st5/README.md b/pyspedas/st5/README.md
new file mode 100644
index 00000000..5e96b326
--- /dev/null
+++ b/pyspedas/st5/README.md
@@ -0,0 +1,25 @@
+
+## Space Technology 5 (ST5)
+The routines in this module can be used to load data from the Space Technology 5 (ST5) mission.
+
+### Instruments
+- Magnetometer (MAG)
+
+### Examples
+Get started by importing pyspedas and tplot; these are required to load and plot the data:
+
+```python
+import pyspedas
+from pytplot import tplot
+```
+
+#### Magnetometer (MAG)
+
+```python
+mag_vars = pyspedas.st5.mag(trange=['2006-06-01', '2006-06-02'])
+
+tplot(['B_SM', 'SC_POS_SM'])
+```
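+
+To fetch the CDF files without creating tplot variables (a sketch based on the `downloadonly` flag exercised in this PR's tests):
+
+```python
+files = pyspedas.st5.mag(trange=['2006-06-01', '2006-06-02'], downloadonly=True)
+```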
+
+
+
\ No newline at end of file
diff --git a/pyspedas/st5/__init__.py b/pyspedas/st5/__init__.py
new file mode 100644
index 00000000..6cc2635c
--- /dev/null
+++ b/pyspedas/st5/__init__.py
@@ -0,0 +1,84 @@
+from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
+
+def mag(trange=['2006-06-01', '2006-06-02'],
+ probe='094',
+ datatype='1sec',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the Magnetometer (MAG)
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str
+ Probe #; Valid options: '094', '224', '155'
+
+ datatype: str
+ Data type; options: '1sec' (default: 1sec)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
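+
+ Example
+ ----------
+ # A minimal usage sketch; 'B_SM' mirrors this PR's ST5 test:
+ import pyspedas
+ from pytplot import tplot
+ mag_vars = pyspedas.st5.mag(trange=['2006-06-01', '2006-06-02'], probe='094')
+ tplot('B_SM')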
+
+ """
+
+ tvars = load(instrument='mag', trange=trange, datatype=datatype, probe=probe, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+ if tvars is None or notplot or downloadonly:
+ return tvars
+
+ return mag_postprocessing(tvars)
+
+
+def mag_postprocessing(variables):
+ """
+ Placeholder for MAG post-processing
+ """
+ return variables
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='ST5', instrument=instrument, label=label)
diff --git a/pyspedas/st5/config.py b/pyspedas/st5/config.py
new file mode 100644
index 00000000..ce310f83
--- /dev/null
+++ b/pyspedas/st5/config.py
@@ -0,0 +1,12 @@
+import os
+
+CONFIG = {'local_data_dir': 'st5_data/',
+ 'remote_data_dir': 'https://spdf.gsfc.nasa.gov/pub/data/st5/'}
+
+# override local data directory with environment variables
+if os.environ.get('SPEDAS_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'st5'])
+
+if os.environ.get('ST5_DATA_DIR'):
+ CONFIG['local_data_dir'] = os.environ['ST5_DATA_DIR']
+
\ No newline at end of file
diff --git a/pyspedas/st5/load.py b/pyspedas/st5/load.py
new file mode 100644
index 00000000..ce69346d
--- /dev/null
+++ b/pyspedas/st5/load.py
@@ -0,0 +1,57 @@
+from pyspedas.utilities.dailynames import dailynames
+from pyspedas.utilities.download import download
+from pytplot import time_clip as tclip
+from pytplot import cdf_to_tplot
+
+from .config import CONFIG
+
+
+def load(trange=['2006-06-01', '2006-06-02'],
+ instrument='mag',
+ probe='',
+ datatype='1sec',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the ST5 mission; this function is not meant
+ to be called directly; instead, see the wrappers:
+
+ pyspedas.st5.mag
+
+ """
+
+ pathformat = probe+'/'+instrument+'/%Y/st5-'+probe+'_'+datatype+'_'+instrument+'_%Y%m%d_v?.?.?.cdf'
+
+ # find the full remote path names using the trange
+ remote_names = dailynames(file_format=pathformat, trange=trange)
+
+ out_files = []
+
+ files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
+ if files is not None:
+ for file in files:
+ out_files.append(file)
+
+ out_files = sorted(out_files)
+
+ if downloadonly:
+ return out_files
+
+ tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
+
+ if notplot:
+ return tvars
+
+ if time_clip:
+ for new_var in tvars:
+ tclip(new_var, trange[0], trange[1], suffix='')
+
+ return tvars
+
+
\ No newline at end of file
diff --git a/pyspedas/st5/tests/__init__.py b/pyspedas/st5/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/st5/tests/tests.py b/pyspedas/st5/tests/tests.py
new file mode 100644
index 00000000..021e9efa
--- /dev/null
+++ b/pyspedas/st5/tests/tests.py
@@ -0,0 +1,24 @@
+import os
+import unittest
+from pytplot import data_exists
+import pyspedas
+
+
+class LoadTestCases(unittest.TestCase):
+ def test_load_mag_data(self):
+ out_vars = pyspedas.st5.mag(time_clip=True)
+ self.assertTrue(data_exists('B_SM'))
+
+ def test_load_notplot(self):
+ out_vars = pyspedas.st5.mag(notplot=True)
+ self.assertTrue('B_SM' in out_vars)
+
+ def test_downloadonly(self):
+ files = pyspedas.st5.mag(downloadonly=True, trange=['2006-06-01', '2006-06-02'])
+ self.assertTrue(os.path.exists(files[0]))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+
\ No newline at end of file
diff --git a/pyspedas/stereo/README.md b/pyspedas/stereo/README.md
index 04742efc..82e886a2 100644
--- a/pyspedas/stereo/README.md
+++ b/pyspedas/stereo/README.md
@@ -5,6 +5,8 @@ The routines in this module can be used to load data from the STEREO mission.
### Instruments
- Magnetometer (MAG)
- PLAsma and SupraThermal Ion Composition (PLASTIC)
+- STEREO Electromagnetic Waves Experiment (S/WAVES)
+- Beacon (low resolution beacon data)
### Examples
Get started by importing pyspedas and tplot; these are required to load and plot the data:
@@ -65,3 +67,17 @@ plastic_vars = pyspedas.stereo.plastic(trange=['2013-11-5', '2013-11-6'])
tplot(['proton_number_density', 'proton_bulk_speed', 'proton_temperature', 'proton_thermal_speed'])
```
+
+#### STEREO/WAVES (S/WAVES)
+
+```python
+hfr_vars = pyspedas.stereo.waves(trange=['2013-11-5', '2013-11-6'])
+tplot(['PSD_FLUX'])
+```
+
+#### Beacon Data
+
+```python
+beacon_vars = pyspedas.stereo.beacon(trange=['2013-11-5', '2013-11-6'])
+tplot(['MAGBField'])
+```
\ No newline at end of file
diff --git a/pyspedas/stereo/__init__.py b/pyspedas/stereo/__init__.py
index f7db8232..8cd16cc0 100644
--- a/pyspedas/stereo/__init__.py
+++ b/pyspedas/stereo/__init__.py
@@ -523,3 +523,131 @@ def plastic(trange=['2013-11-5', '2013-11-6'],
"""
return load(instrument='plastic', trange=trange, probe=probe, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+def waves(trange=['2013-11-5', '2013-11-6'],
+ probe='a',
+ datatype='hfr',
+ level='l3',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads data from the STEREO/WAVES (S/WAVES) instrument
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str
+ Spacecraft probe ('a' for ahead, 'b' for behind)
+
+ datatype: str
+ Data type; Valid options: hfr, lfr
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
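+
+ Example
+ ----------
+ # A minimal usage sketch; 'PSD_FLUX' mirrors this PR's S/WAVES tests:
+ import pyspedas
+ from pytplot import tplot
+ waves_vars = pyspedas.stereo.waves(trange=['2013-11-5', '2013-11-6'], probe='a')
+ tplot('PSD_FLUX')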
+
+ """
+ return load(instrument='waves', trange=trange, probe=probe, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+def beacon(trange=['2013-11-5', '2013-11-6'],
+ probe='a',
+ suffix='',
+ get_support_data=False,
+ varformat=None,
+ varnames=[],
+ downloadonly=False,
+ notplot=False,
+ no_update=False,
+ time_clip=False):
+ """
+ This function loads low-resolution beacon data
+
+ Parameters
+ ----------
+ trange : list of str
+ time range of interest [starttime, endtime] with the format
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
+
+ probe: str
+ Spacecraft probe ('a' for ahead, 'b' for behind)
+
+ suffix: str
+ The tplot variable names will be given this suffix. By default,
+ no suffix is added.
+
+ get_support_data: bool
+ Data with an attribute "VAR_TYPE" with a value of "support_data"
+ will be loaded into tplot. By default, only loads in data with a
+ "VAR_TYPE" attribute of "data".
+
+ varformat: str
+ The file variable formats to load into tplot. Wildcard character
+ "*" is accepted. By default, all variables are loaded in.
+
+ varnames: list of str
+ List of variable names to load (if not specified,
+ all data variables are loaded)
+
+ downloadonly: bool
+ Set this flag to download the CDF files, but not load them into
+ tplot variables
+
+ notplot: bool
+ Return the data in hash tables instead of creating tplot variables
+
+ no_update: bool
+ If set, only load data from your local cache
+
+ time_clip: bool
+ Time clip the variables to exactly the range specified in the trange keyword
+
+ Returns
+ ----------
+ List of tplot variables created.
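+
+ Example
+ ----------
+ # A minimal usage sketch; 'MAGBField' mirrors this PR's beacon tests:
+ import pyspedas
+ from pytplot import tplot
+ beacon_vars = pyspedas.stereo.beacon(trange=['2013-11-5', '2013-11-6'], probe='a')
+ tplot('MAGBField')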
+
+ """
+ return load(instrument='beacon', trange=trange, probe=probe, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
diff --git a/pyspedas/stereo/load.py b/pyspedas/stereo/load.py
index 06653f6c..5a98ceb1 100644
--- a/pyspedas/stereo/load.py
+++ b/pyspedas/stereo/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
@@ -28,6 +28,11 @@ def load(trange=['2013-11-5', '2013-11-6'],
"""
out_files = []
+ all_instr = ['mag', 'plastic', 'swea', 'ste',
+ 'sept', 'sit', 'let', 'het',
+ 'waves', 'beacon']
+ assert instrument in all_instr, f"Instrument {instrument} not in {all_instr}"
if not isinstance(probe, list):
probe = [probe]
@@ -52,11 +57,16 @@ def load(trange=['2013-11-5', '2013-11-6'],
pathformat = 'plastic/level2/Protons/Derived_from_1D_Maxwellian/'+direction+'/'+datatype+'/%Y/ST'+prb.upper()+'_L2_PLA_1DMax_'+datatype+'_%Y%m%d_V??.cdf'
elif instrument == 'swea':
CONFIG['remote_data_dir'] = 'https://spdf.gsfc.nasa.gov/pub/data/stereo/'
- pathformat = direction + '/' + level + '/impact/swea_' + datatype + '/%Y/sta_' + level + '_swea_' + datatype + '_%Y%m%d_v??.cdf'
+ pathformat = direction + '/' + level + '/impact/swea_' + datatype + f'/%Y/st{prb}_' + level + '_swea_' + datatype + '_%Y%m%d_v??.cdf'
elif instrument in ['ste', 'sept', 'sit', 'let', 'het']:
CONFIG['remote_data_dir'] = 'https://spdf.gsfc.nasa.gov/pub/data/stereo/'
- pathformat = direction + '/' + level + '/impact/' + instrument + '/%Y/sta_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
-
+ pathformat = direction + '/' + level + '/impact/' + instrument + f'/%Y/st{prb}_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
+ elif instrument == "waves" :
+ CONFIG['remote_data_dir'] = 'https://spdf.gsfc.nasa.gov/pub/data/stereo/'
+ pathformat = direction + '/' + level + '/waves/' + datatype + f'/%Y/st{prb}_' + level + '_wav_' + datatype + '_%Y%m%d_v??.cdf'
+ elif instrument == 'beacon':
+ CONFIG['remote_data_dir'] = 'https://spdf.gsfc.nasa.gov/pub/data/stereo/'
+ pathformat = direction + '/' + instrument + f'/%Y/st{prb}_lb_impact_'+'%Y%m%d_v??.cdf'
# find the full remote path names using the trange
remote_names = dailynames(file_format=pathformat, trange=trange)
diff --git a/pyspedas/stereo/tests/tests.py b/pyspedas/stereo/tests/tests.py
index 24bd9ba9..5c4b537e 100644
--- a/pyspedas/stereo/tests/tests.py
+++ b/pyspedas/stereo/tests/tests.py
@@ -1,10 +1,9 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_downloadonly(self):
files = pyspedas.stereo.mag(trange=['2013-1-6', '2013-1-7'], downloadonly=True)
@@ -13,6 +12,9 @@ def test_downloadonly(self):
def test_load_mag_data(self):
mag_vars = pyspedas.stereo.mag(trange=['2013-11-5', '2013-11-6'], time_clip=True)
self.assertTrue(data_exists('BFIELD'))
+ mag_vars = pyspedas.stereo.mag(trange=['2013-11-5', '2013-11-6'], datatype='32hz')
+ mag_vars = pyspedas.stereo.mag(trange=['2013-11-5', '2013-11-6'], notplot=True)
+ self.assertTrue('BFIELD' in mag_vars)
def test_load_swea_data(self):
swea_vars = pyspedas.stereo.swea(trange=['2013-1-5', '2013-1-6'], time_clip=True)
@@ -45,5 +47,23 @@ def test_load_plastic_data(self):
self.assertTrue(data_exists('proton_bulk_speed'))
self.assertTrue(data_exists('proton_temperature'))
+ def test_load_waves_data_a(self):
+ w_vars = pyspedas.stereo.waves(trange=['2013-11-5', '2013-11-6'], probe='a')
+ self.assertTrue(data_exists('PSD_FLUX'))
+ self.assertTrue(data_exists('PSD_SFU'))
+
+ def test_load_waves_data_b(self):
+ w_vars = pyspedas.stereo.waves(trange=['2013-11-5', '2013-11-6'], probe='b')
+ self.assertTrue(data_exists('PSD_FLUX'))
+ self.assertTrue(data_exists('PSD_SFU'))
+
+ def test_load_beacon_data_a(self):
+ w_vars = pyspedas.stereo.beacon(trange=['2013-11-5', '2013-11-6'], probe='a')
+ self.assertTrue(data_exists('MAGBField'))
+
+ def test_load_beacon_data_b(self):
+ w_vars = pyspedas.stereo.beacon(trange=['2013-11-5', '2013-11-6'], probe='b')
+ self.assertTrue(data_exists('MAGBField'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/swarm/load.py b/pyspedas/swarm/load.py
index 57a5b33c..fdb1931a 100644
--- a/pyspedas/swarm/load.py
+++ b/pyspedas/swarm/load.py
@@ -1,5 +1,5 @@
from pyspedas import time_string, time_double
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pyspedas.hapi.hapi import hapi
from .config import CONFIG
diff --git a/pyspedas/swarm/tests/tests.py b/pyspedas/swarm/tests/tests.py
index 01bde3c8..6a062059 100644
--- a/pyspedas/swarm/tests/tests.py
+++ b/pyspedas/swarm/tests/tests.py
@@ -1,11 +1,16 @@
import unittest
-from pyspedas.utilities.data_exists import data_exists
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_load_mag_data(self):
- vfm_vars = pyspedas.swarm.mag(probe='c', trange=['2017-03-27/06:00', '2017-03-27/08:00'], datatype='hr')
+ vfm_vars = pyspedas.swarm.mag(probe='c',
+ trange=['2017-03-27/06:00', '2017-03-27/08:00'],
+ datatype='hr',
+ time_clip=True)
self.assertTrue(data_exists('swarmc_B_VFM'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/themis/__init__.py b/pyspedas/themis/__init__.py
index 6b5946d7..88f7d76d 100644
--- a/pyspedas/themis/__init__.py
+++ b/pyspedas/themis/__init__.py
@@ -15,4 +15,8 @@
from pyspedas.themis.ground.ask import ask
from pyspedas.themis.state.state import state
-from pyspedas.themis.state.slp import slp
\ No newline at end of file
+from pyspedas.themis.state.slp import slp
+
+from pyspedas.themis.state.autoload_support import autoload_support
+from pyspedas.themis.state import get_spinmodel
+from pyspedas.themis.cotrans import sse2sel, gse2sse, dsl2gse, ssl2dsl
\ No newline at end of file
diff --git a/pyspedas/themis/cotrans/__init__.py b/pyspedas/themis/cotrans/__init__.py
index e69de29b..961c571c 100644
--- a/pyspedas/themis/cotrans/__init__.py
+++ b/pyspedas/themis/cotrans/__init__.py
@@ -0,0 +1,4 @@
+from .dsl2gse import dsl2gse
+from .gse2sse import gse2sse
+from .sse2sel import sse2sel
+from .ssl2dsl import ssl2dsl
\ No newline at end of file
diff --git a/pyspedas/themis/cotrans/dsl2gse.py b/pyspedas/themis/cotrans/dsl2gse.py
index 350a2442..d55a8ca4 100644
--- a/pyspedas/themis/cotrans/dsl2gse.py
+++ b/pyspedas/themis/cotrans/dsl2gse.py
@@ -4,43 +4,76 @@
Works in a similar way to IDL spedas dsl2gse.pro
"""
-import pytplot
+import logging
+import numpy as np
+from copy import deepcopy
+#import pytplot
+
import pyspedas
from pyspedas.cotrans.cotrans_lib import subgei2gse
-import numpy as np
+from pytplot import data_exists, del_data, store_data, get_data, set_coords, get_coords
+from pyspedas.themis import autoload_support
-def dsl2gse(name_in, spinras, spindec, name_out, isgsetodsl=0):
+def dsl2gse(name_in: str, name_out: str, isgsetodsl: bool = False, ignore_input_coord: bool = False,
+ probe: str = None, use_spinaxis_corrections: bool = True) -> int:
"""Transform dsl to gse.
Parameters
----------
name_in: str
- Name of input pytplot variable (eg. 'tha_fgl_dsl')
- spinras: str
- Name of pytplot variable for spin (eg.'tha_spinras').
- spindec: str
- Name of pytplot variable for spin (eg.'tha_spinras').
+ Name of input pytplot variable (e.g. 'tha_fgl_dsl')
name_out: str
- Name of output pytplot variable (eg. 'tha_fgl_gse')
+ Name of output pytplot variable (e.g. 'tha_fgl_gse')
isgsetodsl: bool
- If 0 (default) then DSL to GSE.
- If 1, then GSE to DSL.
+ If False (default) then DSL to GSE.
+ If True, then GSE to DSL.
+ ignore_input_coord: bool
+ If False (default), check the input coordinate system and fail (returning 0)
+ if it does not match the requested transform.
+ If True, skip the check and transform regardless of the labeled coordinates.
+ probe: str
+ Optional if the probe can be inferred from the input variable name (e.g. tha_xxx_yyy);
+ otherwise, one of ['a','b','c','d','e','f']
+ use_spinaxis_corrections: bool
+ If True (default), use spin axis corrections from V03 state CDFs when available.
+ If False, use the uncorrected spin axis variables.
Returns
-------
- 1 for sucessful completion.
+ 1 for successful completion.
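+
+ Example
+ -------
+ # A hedged sketch: 'tha_fgl_dsl' is the variable named above, assumed to be
+ # loaded first (e.g. via pyspedas.themis.fgm(probe='a')):
+ dsl2gse('tha_fgl_dsl', 'tha_fgl_gse')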
"""
- all_names = pytplot.tplot_names()
- needed_vars = [name_in, spinras, spindec]
- c = [value for value in needed_vars if value in all_names]
- if len(c) < 3:
- print("Variables needed: " + str(needed_vars))
+ needed_vars = [name_in]
+ c = [value for value in needed_vars if data_exists(value)]
+ if len(c) < 1:
+ logging.error("Variables needed: " + str(needed_vars))
m = [value for value in needed_vars if value not in c]
- print("Variables missing: " + str(m))
- print("Please load missing variables.")
- return
+ logging.error("Variables missing: " + str(m))
+ logging.error("Please load missing variables.")
+ return 0
+
+ if probe is None:
+ probe = name_in[2]
+
+ if not ignore_input_coord:
+ in_coord = get_coords(name_in)
+ if in_coord is None:
+ in_coord = "None"
+ if isgsetodsl and (in_coord.lower() != 'gse'):
+ logging.error("GSE to DSL transform requested, but input coordinate system is " + in_coord)
+ return 0
+ if not isgsetodsl and (in_coord.lower() != 'dsl'):
+ logging.error("DSL to GSE transform requested, but input coordinate system is " + in_coord)
+ return 0
+
+ autoload_support(varname=name_in, probe=probe, spinaxis=True)
+
+ if use_spinaxis_corrections:
+ spinras = 'th' + probe + '_spinras_corrected'
+ spindec = 'th' + probe + '_spindec_corrected'
+ else:
+ spinras = 'th' + probe + '_spinras'
+ spindec = 'th' + probe + '_spindec'
# Interpolate spinras and spindec
spinnames_in = [spinras, spindec]
@@ -49,58 +82,78 @@ def dsl2gse(name_in, spinras, spindec, name_out, isgsetodsl=0):
hi_names = [hiras_name, hidec_name]
# If new names exist, delete the variables
- if hiras_name in pytplot.tplot_names():
- pytplot.del_data(hiras_name)
- if hidec_name in pytplot.tplot_names():
- pytplot.del_data(hidec_name)
+ if data_exists(hiras_name):
+ del_data(hiras_name)
+ if data_exists(hidec_name):
+ del_data(hidec_name)
pyspedas.tinterpol(spinnames_in, name_in, method="linear",
newname=hi_names, suffix='')
# Get data
- data_in = pytplot.get_data(name_in)
- data_ras = pytplot.get_data(hiras_name)
- data_dec = pytplot.get_data(hidec_name)
+ data_in = get_data(name_in)
+ meta_in = get_data(name_in, metadata=True)
+ meta_copy = deepcopy(meta_in)
+ data_ras = get_data(hiras_name)
+ data_dec = get_data(hidec_name)
# Make a unit vector that points along the spin axis
- spla = (90.0 - (data_dec[1])) * np.pi/180.0
- splo = data_ras[1] * np.pi/180.0
+ spla = (90.0 - (data_dec[1])) * np.pi / 180.0
+ splo = data_ras[1] * np.pi / 180.0
# spherical to cartesian
zscs0 = np.sin(spla) * np.cos(splo)
- zscs1 = np.sin(spla)*np.sin(splo)
+ zscs1 = np.sin(spla) * np.sin(splo)
zscs2 = np.cos(spla)
+ znorm = np.sqrt(zscs0 * zscs0 + zscs1 * zscs1 + zscs2 * zscs2)
+ zscs0 = np.divide(zscs0, znorm)
+ zscs1 = np.divide(zscs1, znorm)
+ zscs2 = np.divide(zscs2, znorm)
zscs = np.column_stack((zscs0, zscs1, zscs2))
# unit vector that points along the spin axis in GSE
trgse = subgei2gse(data_in[0], zscs)
- zgse = [trgse[:, 0], trgse[:, 1], trgse[:, 2]]
+ zgse = trgse
sun = [1.0, 0.0, 0.0]
- yscs = [zgse[1] * sun[2] - zgse[2] * sun[1],
- zgse[2] * sun[0] - zgse[0] * sun[2],
- zgse[0] * sun[1] - zgse[1] * sun[0]]
- yscsNorm = np.sqrt(yscs[0]**2.0 + yscs[1]**2.0 + yscs[2]**2.0)
- yscs = yscs/yscsNorm
- xscs = [yscs[1] * zgse[2] - yscs[2] * zgse[1],
- yscs[2] * zgse[0] - yscs[0] * zgse[2],
- yscs[0] * zgse[1] - yscs[1] * zgse[0]]
-
- if isgsetodsl == 0:
+ my_y = np.cross(zgse, sun)
+ ynorm = np.sqrt(my_y[:, 0] * my_y[:, 0] + my_y[:, 1] * my_y[:, 1] + my_y[:, 2] * my_y[:, 2])
+ my_y[:, 0] = np.divide(my_y[:, 0], ynorm)
+ my_y[:, 1] = np.divide(my_y[:, 1], ynorm)
+ my_y[:, 2] = np.divide(my_y[:, 2], ynorm)
+ my_x = np.cross(my_y, zgse)
+ xnorm = np.sqrt(my_x[:, 0] * my_x[:, 0] + my_x[:, 1] * my_x[:, 1] + my_x[:, 2] * my_x[:, 2])
+ my_x[:, 0] = np.divide(my_x[:, 0], xnorm)
+ my_x[:, 1] = np.divide(my_x[:, 1], xnorm)
+ my_x[:, 2] = np.divide(my_x[:, 2], xnorm)
+
+ yscs = np.column_stack((zgse[:, 1] * sun[2] - zgse[:, 2] * sun[1],
+ zgse[:, 2] * sun[0] - zgse[:, 0] * sun[2],
+ zgse[:, 0] * sun[1] - zgse[:, 1] * sun[0]))
+ # yscs_norm = np.sqrt(yscs[:,0] ** 2.0 + yscs[:,1] ** 2.0 + yscs[:,2] ** 2.0)
+ # yscs = np.divide(yscs, yscs_norm)
+ xscs = np.column_stack((yscs[:, 1] * zgse[:, 2] - yscs[:, 2] * zgse[:, 1],
+ yscs[:, 2] * zgse[:, 0] - yscs[:, 0] * zgse[:, 2],
+ yscs[:, 0] * zgse[:, 1] - yscs[:, 1] * zgse[:, 0]))
+
+ if not isgsetodsl:
# DSL -> GSE
dd = data_in[1]
- d0 = dd[:, 0] * xscs[0] + dd[:, 1] * yscs[0] + dd[:, 2] * zgse[0]
- d1 = dd[:, 0] * xscs[1] + dd[:, 1] * yscs[1] + dd[:, 2] * zgse[1]
- d2 = dd[:, 0] * xscs[2] + dd[:, 1] * yscs[2] + dd[:, 2] * zgse[2]
+ d0 = dd[:, 0] * my_x[:, 0] + dd[:, 1] * my_y[:, 0] + dd[:, 2] * zgse[:, 0]
+ d1 = dd[:, 0] * my_x[:, 1] + dd[:, 1] * my_y[:, 1] + dd[:, 2] * zgse[:, 1]
+ d2 = dd[:, 0] * my_x[:, 2] + dd[:, 1] * my_y[:, 2] + dd[:, 2] * zgse[:, 2]
+ out_coord = 'GSE'
else:
# GSE -> DSL
dd = data_in[1]
- d0 = dd[:, 0] * xscs[0] + dd[:, 1] * xscs[1] + dd[:, 2] * xscs[2]
- d1 = dd[:, 0] * yscs[0] + dd[:, 1] * yscs[1] + dd[:, 2] * yscs[2]
- d2 = dd[:, 0] * zgse[0] + dd[:, 1] * zgse[1] + dd[:, 2] * zgse[2]
+ d0 = dd[:, 0] * my_x[:, 0] + dd[:, 1] * my_x[:, 1] + dd[:, 2] * my_x[:, 2]
+ d1 = dd[:, 0] * my_y[:, 0] + dd[:, 1] * my_y[:, 1] + dd[:, 2] * my_y[:, 2]
+ d2 = dd[:, 0] * zgse[:, 0] + dd[:, 1] * zgse[:, 1] + dd[:, 2] * zgse[:, 2]
+ out_coord = 'DSL'
dd_out = [d0, d1, d2]
data_out = np.column_stack(dd_out)
- pytplot.store_data(name_out, data={'x': data_in[0], 'y': data_out})
+ store_data(name_out, data={'x': data_in[0], 'y': data_out}, attr_dict=meta_copy)
+ set_coords(name_out, out_coord)
return 1
diff --git a/pyspedas/themis/cotrans/gse2sse.py b/pyspedas/themis/cotrans/gse2sse.py
new file mode 100644
index 00000000..4970f690
--- /dev/null
+++ b/pyspedas/themis/cotrans/gse2sse.py
@@ -0,0 +1,176 @@
+"""Transform GSE data to SSE data.
+
+Notes:
+ Works in a similar way to IDL spedas gse2sse.pro
+"""
+
+import logging
+import numpy as np
+from copy import deepcopy
+
+from pyspedas import tnormalize, tcrossp, tinterpol, deriv_data, cotrans, tvector_rotate
+from pyspedas.themis import autoload_support
+from pytplot import data_exists, get_data, store_data, get_coords, set_coords
+
+def gse2sse(name_in: str, name_out: str, isssetogse: bool = False,
+ variable_type: str = 'Other', ignore_input_coord: bool = False, rotation_only: bool = False) -> int:
+
+ """Transform gse to sse.
+
+ Parameters
+ ----------
+ name_in: str
+ Name of input pytplot variable (e.g. 'tha_fgl_dsl')
+ name_out: str
+ Name of output pytplot variable (e.g. 'tha_fgl_sse')
+ isssetogse: bool
+ If False (default), then GSE to SSE.
+ If True, then SSE to GSE.
+ ignore_input_coord: bool
+ if False (default), check the input coordinate system, and fail (returning 0) if it does not match the requested transform.
+ if True, skip the input coordinate check.
+ variable_type: str
+ A string describing the type of data being transformed. If value is "pos" or "vel", the appropriate
+ offsets (lunar position or velocity) are applied during the transform. Any other value will be treated
+ as equivalent to rotation_only=True.
+ rotation_only: bool
+ if False (default), assume input variable is a position with units of km, and apply the earth-moon
+ offset before rotating to SSE, or after rotating to GSE
+ if True, assume the input variable is a velocity or some other quantity that does not need the earth-moon
+ translation step
+
+ Returns
+ -------
+ 1 for successful completion, 0 for failure.
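+
+ Examples
+ --------
+ A minimal usage sketch; 'tha_state_pos_gse' is a hypothetical input variable, assumed
+ to be a position in km already tagged with GSE coordinates:
+
+ >>> from pyspedas.themis.cotrans.gse2sse import gse2sse
+ >>> gse2sse('tha_state_pos_gse', 'tha_state_pos_sse', variable_type='pos')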
+ """
+
+ needed_vars = [name_in]
+ c = [value for value in needed_vars if data_exists(value)]
+ if len(c) < 1:
+ logging.error("Variables needed: " + str(needed_vars))
+ m = [value for value in needed_vars if value not in c]
+ logging.error("Variables missing: " + str(m))
+ logging.error("Please load missing variables.")
+ return 0
+
+ # load support data
+ autoload_support(varname=name_in, slp=True)
+ name_sun_pos = 'slp_sun_pos'
+ name_lun_pos = 'slp_lun_pos'
+
+ if not ignore_input_coord:
+ # check input coord
+ in_coord = get_coords(name_in)
+ if in_coord is None:
+ in_coord = "None"
+ if not isssetogse and (in_coord.lower() != 'gse'):
+ logging.error("GSE to SSE transform requested, but input coordinate system is " + in_coord)
+ return 0
+ if isssetogse and (in_coord.lower() != 'sse'):
+ logging.error("SSE to GSE transform requested, but input coordinate system is " + in_coord)
+ return 0
+
+ # check sun pos coord
+ sun_pos_coord = get_coords(name_sun_pos)
+ if sun_pos_coord is None:
+ sun_pos_coord = "None"
+ if sun_pos_coord.lower() != 'gse':
+ logging.info('Transforming %s to GSE', name_sun_pos)
+ sun_pos_gse_name = name_sun_pos + '_gse'
+ cotrans(name_in=name_sun_pos, name_out=sun_pos_gse_name, coord_out='GSE')
+ else:
+ sun_pos_gse_name = name_sun_pos
+
+ # check lun pos coord
+ lun_pos_coord = get_coords(name_lun_pos)
+ if lun_pos_coord is None:
+ lun_pos_coord = "None"
+ if lun_pos_coord.lower() != 'gse':
+ logging.info('Transforming %s to GSE', name_lun_pos)
+ lun_pos_gse_name = name_lun_pos + '_gse'
+ cotrans(name_in=name_lun_pos, name_out=lun_pos_gse_name, coord_out='GSE')
+ else:
+ lun_pos_gse_name = name_lun_pos
+ else:
+ sun_pos_gse_name = name_sun_pos
+ lun_pos_gse_name = name_lun_pos
+
+ meta_copy = deepcopy(get_data(name_in, metadata=True))
+
+ # Make rotation matrix
+ sunpos = get_data(sun_pos_gse_name)
+ lunpos = get_data(lun_pos_gse_name)
+ sun_pos_dim = sunpos.y.shape
+
+ # Moon to sun vector = sunpos - lunpos
+ lun_sun_vec = sunpos.y - lunpos.y
+
+ # SSE X-axis: unit vector from sun toward moon
+ sse_x = tnormalize(lun_sun_vec, return_data=True)
+ # SSE Y-axis: ecliptic north unit vector (0,0,1) cross SSE-X
+ ecliptic_north = np.repeat(np.array([[0, 0, 1]]), sun_pos_dim[0], axis=0)
+ sse_y = tcrossp(ecliptic_north, sse_x, return_data=True)
+ # SSE Z-axis: SSE-X cross SSE-Y (not necessarily ecliptic north)
+ sse_z = tcrossp(sse_x, sse_y, return_data=True)
+
+ # Make rotation matrix from basis vectors, store in tplot variable
+
+ out_data = np.zeros((sun_pos_dim[0], 3, 3))
+ if not isssetogse:
+ out_data[:, 0, :] = sse_x
+ out_data[:, 1, :] = sse_y
+ out_data[:, 2, :] = sse_z
+ else:
+ # Invert sense of conversion by transposing rotation array
+ out_data[:, :, 0] = sse_x
+ out_data[:, :, 1] = sse_y
+ out_data[:, :, 2] = sse_z
+
+ store_data('sse_mat_cotrans', data={'x': sunpos.times, 'y': out_data})
+
+ if variable_type.lower() == "pos" and not rotation_only:
+ tinterpol(lun_pos_gse_name, name_in, newname='gse2sse_offset')
+ gse2sse_offset_data = get_data('gse2sse_offset')
+ elif variable_type.lower() == "vel" and not rotation_only:
+ deriv_data(lun_pos_gse_name, new_names='gse2sse_lun_vel')
+ tinterpol('gse2sse_lun_vel', name_in, newname='gse2sse_offset')
+ gse2sse_offset_data = get_data('gse2sse_offset')
+ else:
+ logging.info("No offsets performed for variable type %s", variable_type)
+ rotation_only = True
+
+ if not isssetogse:
+ """ GSE -> SSE
+ """
+
+ if not rotation_only:
+ logging.info("Applying earth-moon %s offset to input variable %s",variable_type,name_in)
+ input_pos = get_data(name_in)
+ translated_pos = input_pos.y - gse2sse_offset_data.y
+ name_trans = name_in + '_trans'
+ store_data(name_trans,data={'x':input_pos.times, 'y':translated_pos},attr_dict=meta_copy)
+ tvector_rotate('sse_mat_cotrans',name_trans,newname=name_out)
+ else:
+ logging.info("No earth-moon offsets applied")
+ tvector_rotate('sse_mat_cotrans',name_in,newname=name_out)
+
+ set_coords(name_out,'SSE')
+ return 1
+
+ else:
+ """ SSE -> GSE
+ """
+ if not rotation_only:
+ tvector_rotate('sse_mat_cotrans', name_in, newname='gse2sse_rotated')
+ logging.info("Applying moon-earth %s offset to rotated variable %s", variable_type, 'gse2sse_rotated')
+ rotated_data = get_data('gse2sse_rotated')
+ earth_data = rotated_data.y + gse2sse_offset_data.y
+ store_data(name_out, data={'x': rotated_data.times, 'y': earth_data}, attr_dict=meta_copy)
+ else:
+ logging.info("No earth-moon offsets applied")
+ tvector_rotate('sse_mat_cotrans', name_in, newname=name_out)
+
+ set_coords(name_out, 'GSE')
+ return 1
+
+
diff --git a/pyspedas/themis/cotrans/sse2sel.py b/pyspedas/themis/cotrans/sse2sel.py
new file mode 100644
index 00000000..fdfb6da3
--- /dev/null
+++ b/pyspedas/themis/cotrans/sse2sel.py
@@ -0,0 +1,158 @@
+"""Transformation SSE data to SEL data.
+Notes:
+ Works in a similar way to IDL spedas sse2sel.pro
+"""
+import logging
+import numpy as np
+import pytplot
+from pyspedas import tnormalize, tcrossp
+from pyspedas.cotrans.tvector_rotate import tvector_rotate
+from pytplot import data_exists, get_coords, set_coords
+from pyspedas.cotrans.cotrans import cotrans
+from pyspedas.analysis.tinterpol import tinterpol
+from pyspedas.analysis.deriv_data import deriv_data
+from pyspedas.themis.cotrans.gse2sse import gse2sse
+from pyspedas.themis import autoload_support
+
+def sse2sel(name_in: str, name_out: str, isseltosse: bool = False, ignore_input_coord: bool = False) -> int:
+ """Transform sse to sel.
+ Parameters
+ ----------
+ name_in: str
+ Name of input pytplot variable (e.g. 'tha_state_pos_sse' or 'tha_state_pos_sel')
+ name_out: str
+ Name of output pytplot variable (e.g. 'tha_fgl_sse')
+ isseltosse: bool
+ If False (default), then SSE to SEL.
+ If True, then SEL to SSE.
+ ignore_input_coord: bool
+ if False (default), check the input coordinate system, and fail (returning 0) if it does not match the requested transform.
+ if True, skip the input coordinate check.
+ Returns
+ -------
+ 1 for successful completion, 0 for failure.
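+
+ Examples
+ --------
+ A minimal usage sketch; 'tha_state_pos_sse' is a hypothetical SSE position variable,
+ e.g. one produced by gse2sse:
+
+ >>> from pyspedas.themis.cotrans.sse2sel import sse2sel
+ >>> sse2sel('tha_state_pos_sse', 'tha_state_pos_sel')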
+ """
+ needed_vars = [name_in]
+ c = [value for value in needed_vars if data_exists(value)]
+ if len(c) < 1:
+ logging.error("Variables needed: " + str(needed_vars))
+ m = [value for value in needed_vars if value not in c]
+ logging.error("Variables missing: " + str(m))
+ logging.error("Please load missing variables.")
+ return 0
+
+ autoload_support(varname=name_in, slp=True)
+ name_sun_pos = 'slp_sun_pos'
+ name_lun_pos = 'slp_lun_pos'
+ name_lun_att_x = 'slp_lun_att_x'
+ name_lun_att_z = 'slp_lun_att_z'
+
+ if not ignore_input_coord:
+ # check input coord
+ in_coord = get_coords(name_in)
+ if in_coord is None:
+ in_coord = "None"
+ if not isseltosse and (in_coord.lower() != 'sse'):
+ logging.error("SSE to SEL transform requested, but input coordinate system is " + in_coord)
+ return 0
+ if isseltosse and (in_coord.lower() != 'sel'):
+ logging.error("SEL to SSE transform requested, but input coordinate system is " + in_coord)
+ return 0
+
+ # check sun pos coord
+ sun_pos_coord = get_coords(name_sun_pos)
+ if sun_pos_coord is None:
+ sun_pos_coord = "None"
+ if sun_pos_coord.lower() != 'gse':
+ logging.info('Transforming %s to GSE', name_sun_pos)
+ sun_pos_gse_name = name_sun_pos + '_gse'
+ cotrans(name_in=name_sun_pos, name_out=sun_pos_gse_name, coord_out='GSE')
+ else:
+ sun_pos_gse_name = name_sun_pos
+ # check lun pos coord
+ lun_pos_coord = get_coords(name_lun_pos)
+ if lun_pos_coord is None:
+ lun_pos_coord = "None"
+ if lun_pos_coord.lower() != 'gse':
+ logging.info('Transforming %s to GSE', name_lun_pos)
+ lun_pos_gse_name = name_lun_pos + '_gse'
+ cotrans(name_in=name_lun_pos, name_out=lun_pos_gse_name, coord_out='GSE')
+ else:
+ lun_pos_gse_name = name_lun_pos
+ # check lun att x coord
+ lun_att_x_coord = get_coords(name_lun_att_x)
+ if lun_att_x_coord is None:
+ lun_att_x_coord = "None"
+ if lun_att_x_coord.lower() != 'gse':
+ logging.info('Transforming %s to GSE', name_lun_att_x)
+ lun_att_x_gse_name = name_lun_att_x + '_gse'
+ cotrans(name_in=name_lun_att_x, name_out=lun_att_x_gse_name, coord_out='GSE')
+ else:
+ lun_att_x_gse_name = name_lun_att_x
+ # check lun att z coord
+ lun_att_z_coord = get_coords(name_lun_att_z)
+ if lun_att_z_coord is None:
+ lun_att_z_coord = "None"
+ if lun_att_z_coord.lower() != 'gse':
+ logging.info('Transforming %s to GSE', name_lun_att_z)
+ lun_att_z_gse_name = name_lun_att_z + '_gse'
+ cotrans(name_in=name_lun_att_z, name_out=lun_att_z_gse_name, coord_out='GSE')
+ else:
+ lun_att_z_gse_name = name_lun_att_z
+ else:
+ sun_pos_gse_name = name_sun_pos
+ lun_pos_gse_name = name_lun_pos
+ lun_att_x_gse_name = name_lun_att_x
+ lun_att_z_gse_name = name_lun_att_z
+ # Make rotation matrix
+ sunpos = pytplot.get_data(sun_pos_gse_name)
+ sun_pos_dim = sunpos.y.shape
+
+ # X basis vector
+ result = gse2sse(lun_att_x_gse_name, 'sel_x_sse', rotation_only=True)
+ sel_x_sse = pytplot.get_data('sel_x_sse')
+ x_axis = sel_x_sse.y
+
+ # Z basis vector
+ result = gse2sse(lun_att_z_gse_name, 'sel_z_sse', rotation_only=True)
+ sel_z_sse = pytplot.get_data('sel_z_sse')
+ z_axis = sel_z_sse.y
+
+ # Y basis vector
+ tcrossp('sel_z_sse', 'sel_x_sse', newname='sel_y_sse')
+ sel_y_sse = pytplot.get_data('sel_y_sse')
+ y_axis = sel_y_sse.y
+
+ out_data = np.zeros((sun_pos_dim[0], 3, 3))
+ if not isseltosse:
+ out_data[:, 0, :] = x_axis
+ out_data[:, 1, :] = y_axis
+ out_data[:, 2, :] = z_axis
+ else:
+ # Invert sense of conversion by transposing rotation array
+ out_data[:, :, 0] = x_axis
+ out_data[:, :, 1] = y_axis
+ out_data[:, :, 2] = z_axis
+ pytplot.store_data('sel_mat_cotrans', data={'x': sunpos.times, 'y': out_data})
+ if not isseltosse:
+ """ SSE -> SEL
+ """
+ tvector_rotate('sel_mat_cotrans', name_in, newname=name_out)
+ set_coords(name_out, 'SEL')
+ return 1
+
+ else:
+ """ SEL -> SSE
+ """
+ tvector_rotate('sel_mat_cotrans', name_in, newname=name_out)
+ set_coords(name_out, 'SSE')
+ return 1
\ No newline at end of file
diff --git a/pyspedas/themis/cotrans/ssl2dsl.py b/pyspedas/themis/cotrans/ssl2dsl.py
new file mode 100644
index 00000000..4ff6b3cb
--- /dev/null
+++ b/pyspedas/themis/cotrans/ssl2dsl.py
@@ -0,0 +1,115 @@
+"""Transform SSL data to DSL data.
+
+Notes:
+ Works in a similar way to IDL spedas ssl2dsl.pro
+"""
+
+import logging
+from math import pi
+import numpy as np
+from copy import deepcopy
+
+from pytplot import get_data, store_data, data_exists, get_coords, set_coords
+from pyspedas.themis.state import get_spinmodel
+from pyspedas.themis import autoload_support
+
+
+def ssl2dsl(name_in: str, name_out: str, isdsltossl: bool = False, ignore_input_coord: bool = False,
+ probe: str = None, use_spinphase_correction: bool = True, eclipse_correction_level: int = 0) -> int:
+ """Transform ssl to dsl.
+
+ Parameters
+ ----------
+ name_in: str
+ Name of input pytplot variable (e.g. 'tha_fgl_ssl')
+ name_out: str
+ Name of output pytplot variable (e.g. 'tha_fgl_dsl')
+ isdsltossl: bool
+ If False (default), then SSL to DSL.
+ If True, then DSL to SSL.
+ ignore_input_coord: bool
+ if False (default), then fail and return 0 if input coordinate system does not match requested transform
+ if True, do not check input coordinate system.
+ probe: str
+ Usually optional, if the variable name is prefixed with 'tha', 'thb', etc.
+ Otherwise, one of ['a','b','c','d','e','f']
+ use_spinphase_correction: bool
+ If True (default), use spin phase corrections from V03 STATE CDF
+ if False, omit this
+ eclipse_correction_level: int
+ Specify which of the three available spin models to use for this transform
+ 0: (default) No eclipse correction
+ 1: Eclipse corrections for waveform data
+ 2: Eclipse corrections for particles and spin fits (includes additional angular offset)
+
+ Returns
+ -------
+ 1 for successful completion, 0 for failure.
+
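+ Examples
+ --------
+ A minimal usage sketch; 'tha_fgl_ssl' is a hypothetical SSL field variable, and the
+ required state support data is loaded automatically via autoload_support:
+
+ >>> from pyspedas.themis.cotrans.ssl2dsl import ssl2dsl
+ >>> ssl2dsl('tha_fgl_ssl', 'tha_fgl_dsl', eclipse_correction_level=1)
+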
+ """
+ needed_vars = [name_in]
+ c = [value for value in needed_vars if data_exists(value)]
+ if len(c) < 1:
+ logging.error("Variables needed: " + str(needed_vars))
+ m = [value for value in needed_vars if value not in c]
+ logging.error("Variables missing: " + str(m))
+ logging.error("Please load missing variables.")
+ return 0
+
+ if probe is None:
+ probe = name_in[2]
+
+ autoload_support(varname=name_in, probe=probe, spinmodel=True)
+ spinmodel_obj = get_spinmodel(probe=probe, correction_level=eclipse_correction_level)
+
+ if not ignore_input_coord:
+ in_coord = get_coords(name_in)
+ if in_coord is None:
+ in_coord = "None"
+ if isdsltossl and (in_coord.upper() != 'DSL'):
+ logging.error("DSL to SSL transform requested, but input coordinate system is " + in_coord)
+ return 0
+ if not isdsltossl and (in_coord.upper() != 'SSL'):
+ logging.error("SSL to DSL transform requested, but input coordinate system is " + in_coord)
+ return 0
+
+ # Get data
+ result = get_data(name_in)
+ in_times = result.times
+ data_in = result.y
+ metadata = get_data(name_in, metadata=True)
+ meta_copy = deepcopy(metadata)
+
+ logging.info('Using spin model to calculate phase versus time...')
+ result = spinmodel_obj.interp_t(in_times, use_spinphase_correction=use_spinphase_correction)
+ spinmodel_phase = result.spinphase * pi / 180.0
+ phase = spinmodel_phase
+ d0 = data_in[:, 0]
+ d1 = data_in[:, 1]
+ d2 = data_in[:, 2]
+ out_d2 = d2
+
+ out_coord = 'DSL'
+ if isdsltossl:
+ # re-spin: negate the phase to rotate DSL back to SSL
+ phase = -1.0 * phase
+ out_coord = 'SSL'
+
+ out_d0 = d0 * np.cos(phase) - d1 * np.sin(phase)
+ out_d1 = d0 * np.sin(phase) + d1 * np.cos(phase)
+
+ dd_out = [out_d0, out_d1, out_d2]
+ data_out = np.column_stack(dd_out)
+ store_data(name_out, data={'x': in_times, 'y': data_out}, attr_dict=meta_copy)
+ set_coords(name_out, out_coord)
+
+ return 1
diff --git a/pyspedas/themis/ground/gmag.py b/pyspedas/themis/ground/gmag.py
index 37b379f3..0cc27a3f 100644
--- a/pyspedas/themis/ground/gmag.py
+++ b/pyspedas/themis/ground/gmag.py
@@ -1,4 +1,4 @@
-
+import logging
import requests
from pyspedas.themis.load import load
@@ -183,7 +183,7 @@ def gmag_list(group='all'):
station_group = get_group(station_name)
if group in ['all', '*', ''] or group in station_group:
station_list.append(station_name)
- print(station_name + ": from " + station['day_first'] + " to "
+ logging.info(station_name + ": from " + station['day_first'] + " to "
+ station['day_last'])
return station_list
@@ -210,9 +210,8 @@ def gmag_groups():
group_dict[key].append(station['ccode'].lower())
# print them
- print()
for g, s in group_dict.items():
- print(g + ":" + ",'".join(s) + "'")
+ logging.info(g + ":" + ",'".join(s) + "'")
return group_dict
diff --git a/pyspedas/themis/load.py b/pyspedas/themis/load.py
index 123f1383..285fd60e 100644
--- a/pyspedas/themis/load.py
+++ b/pyspedas/themis/load.py
@@ -1,7 +1,7 @@
import logging
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/themis/spacecraft/fields/fit.py b/pyspedas/themis/spacecraft/fields/fit.py
index fc4ca797..388e6652 100644
--- a/pyspedas/themis/spacecraft/fields/fit.py
+++ b/pyspedas/themis/spacecraft/fields/fit.py
@@ -92,7 +92,7 @@ def cal_fit(probe='a', no_cal=False):
from pytplot import get_data, store_data, tplot_names, options
from pyspedas.utilities.download import download
from pyspedas.themis.config import CONFIG
- from pyspedas.utilities.time_double import time_float_one
+ from pytplot import time_float_one
from copy import deepcopy
from numpy.linalg import inv
diff --git a/pyspedas/themis/state/__init__.py b/pyspedas/themis/state/__init__.py
index e69de29b..0a674cea 100644
--- a/pyspedas/themis/state/__init__.py
+++ b/pyspedas/themis/state/__init__.py
@@ -0,0 +1 @@
+from .spinmodel.spinmodel import Spinmodel, get_spinmodel, save_spinmodel
\ No newline at end of file
diff --git a/pyspedas/themis/state/apply_spinaxis_corrections.py b/pyspedas/themis/state/apply_spinaxis_corrections.py
new file mode 100644
index 00000000..12fc6606
--- /dev/null
+++ b/pyspedas/themis/state/apply_spinaxis_corrections.py
@@ -0,0 +1,87 @@
+import logging
+from pytplot import get_data, store_data, tplot_copy
+import numpy as np
+from pytplot import data_exists
+
+
+def apply_oneaxis_correction(rawvar: str,
+ deltavar: str,
+ corrvar: str):
+ """
+ This function applies spin axis corrections for a single spin axis quantity (RA or DEC).
+
+ Parameters:
+ rawvar: str
+ Name of tplot variable holding the uncorrected quantity of interest.
+
+ deltavar: str
+ Name of tplot variable holding the corrections for the quantity of interest
+
+ corrvar: str
+ Name of tplot variable to receive the corrected quantity
+ """
+
+ if (data_exists(rawvar)) and (data_exists(deltavar)):
+ logging.debug("Applying spin axis corrections")
+ res1 = get_data(rawvar)
+ raw_times = res1.times
+ raw_data = res1.y
+ metadata = get_data(rawvar, metadata=True)
+
+ res2 = get_data(deltavar)
+ corr_times = res2.times
+ corr_data = res2.y
+
+ # Interpolate corrections using input_times
+
+ # In IDL, there was a special case, if the correction had only a single quantity, because the
+ # interpol routine would crash with only a single value given. np.interp does the right thing
+ # by default.
+
+ interp_correction = np.interp(raw_times, corr_times, corr_data)
+
+ # Apply corrections
+ fix_raw = raw_data - interp_correction
+
+ # Make output tplot variable
+ store_data(corrvar, data={'x': raw_times, 'y': fix_raw}, attr_dict=metadata)
+ elif not data_exists(deltavar):
+ # If the corrections aren't present, just copy the original data
+ logging.info('Spin axis corrections variable ' + deltavar + ' not found, copying ' + rawvar + ' to ' + corrvar)
+ tplot_copy(rawvar, corrvar)
+ else:
+ # Raw variable doesn't exist, nothing to do here.
+ logging.info('Spin axis variable ' + rawvar + ' not found, skipping ' + corrvar)
+
+
+def apply_spinaxis_corrections(spinras: str,
+ spindec: str,
+ delta_spinras: str,
+ delta_spindec: str,
+ corrected_spinras: str,
+ corrected_spindec: str):
+ """
+ This function applies V03 state spin axis corrections (if present) to state spinras and spindec
+ quantities, creating new tplot variables.
+
+ Parameters:
+ spinras: str
+ Name of tplot variable holding the uncorrected spin axis right ascension.
+
+ spindec: str
+ Name of tplot variable holding the uncorrected spin axis declination.
+
+ delta_spinras: str
+ Name of tplot variable holding the correction to the spin axis right ascension.
+
+ delta_spindec: str
+ Name of tplot variable holding the corrections to the spin axis declination.
+
+ corrected_spinras: str
+ Name of tplot variable to receive the corrected spin axis right ascension.
+
+ corrected_spindec: str
+ Name of tplot variable to receive the corrected spin axis declination.
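+
+ Example (hypothetical variable names; the delta variables come from V03 STATE CDFs):
+ apply_spinaxis_corrections(spinras='tha_spinras', spindec='tha_spindec',
+ delta_spinras='tha_spinras_delta', delta_spindec='tha_spindec_delta',
+ corrected_spinras='tha_spinras_corrected', corrected_spindec='tha_spindec_corrected')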
+ """
+ apply_oneaxis_correction(rawvar=spinras, deltavar=delta_spinras, corrvar=corrected_spinras)
+ apply_oneaxis_correction(rawvar=spindec, deltavar=delta_spindec, corrvar=corrected_spindec)
diff --git a/pyspedas/themis/state/autoload_support.py b/pyspedas/themis/state/autoload_support.py
new file mode 100644
index 00000000..bdd89059
--- /dev/null
+++ b/pyspedas/themis/state/autoload_support.py
@@ -0,0 +1,173 @@
+import logging
+import pyspedas
+import pyspedas.themis
+from pytplot import data_exists
+from pytplot import time_double
+from pyspedas.themis.state.spinmodel.spinmodel import get_spinmodel
+import pytplot
+
+
+def load_needed(trange_loaded,
+ trange_needed,
+ tolerance=0.0):
+ """
+ Given a time range of loaded data, and the time range needed, determine if
+ a support variable needs to be reloaded. A small amount of extrapolation is considered
+ acceptable, so the time comparisons are done with a user-specified tolerance.
+
+ Parameters:
+ trange_loaded: list of float
+ Start and end times (Unix times) for support variable currently loaded.
+
+ trange_needed: list of float
+ Start and end times (Unix times) that the support variable must cover for the desired operation.
+
+ tolerance: float
+ A duration, in seconds, for which extrapolation from currently loaded data is considered valid.
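+
+ Returns:
+ bool: True if the support variable must be (re)loaded; False if the loaded range,
+ padded by the tolerance, already covers the needed range.
+
+ Example (illustrative values; the 120 s pad makes the loaded range sufficient):
+ load_needed([1000.0, 2000.0], [990.0, 2010.0], tolerance=120.0) # returns False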
+ """
+ st = trange_loaded[0] - tolerance
+ et = trange_loaded[1] + tolerance
+ if (trange_needed[0] > st) and (trange_needed[1] < et):
+ return False
+ else:
+ return True
+
+
+def autoload_support(varname=None,
+ trange=None,
+ probe=None,
+ spinaxis: bool = False,
+ spinmodel: bool = False,
+ slp: bool = False):
+ """
+ Automatically load THEMIS support data required to cover a given probe or time range.
+
+ Parameters:
+ varname: str (optional)
+ Name of a tplot variable for which calibration, cotrans, or other operation requiring
+ support data is needed. The trange and probe arguments are optional if this argument
+ is provided.
+
+ trange: list of Unix times or time strings
+ Start and end times (in either string or Unix time format) for which support data is needed.
+ Required if varname not specified.
+
+ probe: str (optional)
+ A single letter probe identifier. Required if varname not specified, and spinaxis or spinmodel
+ support data is requested.
+
+ spinaxis: bool
+ If True, the spin axis variables from the state CDF are examined to see if state needs to
+ be reloaded.
+
+ spinmodel: bool
+ If True, the spin model produced from the state CDF is examined to see if state needs to
+ be reloaded.
+
+ slp: bool
+ If True, the tplot variables holding sun and moon positions and lunar coordinate system axes
+ are examined to see if the SLP data needs to be reloaded.
+
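+ Example (hypothetical variable name; loads whatever support data the time span requires):
+
+ autoload_support(varname='tha_fgl_ssl', spinaxis=True, spinmodel=True)
+ autoload_support(trange=['2007-03-23', '2007-03-24'], slp=True)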
+
+ """
+ if varname is not None:
+ trange = pytplot.get_timespan(varname)
+ elif trange is None:
+ logging.error("Must specify either a tplot name or a time range in order to load support data")
+ return
+ elif (probe is None) and (spinaxis or spinmodel):
+ logging.error("Must specify either a tplot name or a probe in order to load spin axis or spin model data")
+ return
+
+ # Validate varname if present
+ if (varname is not None) and not (data_exists(varname)):
+ logging.error("tplot variable name " + varname + " not found.")
+ return
+
+ # Set probe name (if needed)
+ if spinaxis or spinmodel:
+ if probe is None:
+ probe = varname[2]
+
+ # Set time range (if needed)
+ if trange is None:
+ trange_needed = pytplot.get_timespan(varname)
+ else:
+ if isinstance(trange[0], str):
+ trange_needed = time_double(trange)
+ else:
+ trange_needed = trange
+
+ do_state = False
+ do_slp = False
+ slop = 120.0 # Tolerance (seconds) for determining if existing data covers desired range
+
+ # Does spin model cover desired time interval?
+ if spinmodel:
+ sm = get_spinmodel(probe, correction_level=1, quiet=True)
+ if sm is None:
+ do_state = True
+ else:
+ sminfo = sm.get_timerange()
+ trange_loaded = sminfo
+ if load_needed(trange_loaded, trange_needed, tolerance=slop):
+ do_state = True
+
+ # Do spin axis variables exist, and cover desired time interval?
+
+ if spinaxis:
+ v1 = "th" + probe + "_spinras"
+ v2 = "th" + probe + "_spindec"
+ v3 = "th" + probe + "_spinras_corrected"
+ v4 = "th" + probe + "_spindec_corrected"
+
+ # Check uncorrected variables
+ if not (data_exists(v1)) or not (data_exists(v2)):
+ do_state = True
+ else:
+ v1_tr = pytplot.get_timespan(v1)
+ v2_tr = pytplot.get_timespan(v2)
+ if (load_needed(v1_tr, trange_needed, tolerance=slop) or
+ load_needed(v2_tr, trange_needed, tolerance=slop)):
+ do_state = True
+
+ # Check corrected variables. They may be unavailable even if state is reloaded,
+ # so only force a reload if one of the variables exists, but doesn't cover the
+ # needed time range.
+
+ if data_exists(v3):
+ v3_tr = pytplot.get_timespan(v3)
+ if load_needed(v3_tr, trange_needed, tolerance=slop):
+ do_state = True
+
+ if data_exists(v4):
+ v4_tr = pytplot.get_timespan(v4)
+ if load_needed(v4_tr, trange_needed, tolerance=slop):
+ do_state = True
+
+ # Check SLP variables. They must all exist, and cover the desired time range, or
+ # reload is necessary.
+
+ if slp:
+ v1 = 'slp_lun_att_x'
+ v2 = 'slp_lun_att_z'
+ v3 = 'slp_lun_pos'
+ v4 = 'slp_sun_pos'
+ if not (data_exists(v1) and data_exists(v2) and data_exists(v3) and data_exists(v4)):
+ do_slp = True
+ else:
+ v1_tr = pytplot.get_timespan(v1)
+ v2_tr = pytplot.get_timespan(v2)
+ v3_tr = pytplot.get_timespan(v3)
+ v4_tr = pytplot.get_timespan(v4)
+ if (load_needed(v1_tr, trange_needed, tolerance=slop)
+ or load_needed(v2_tr, trange_needed, tolerance=slop)
+ or load_needed(v3_tr, trange_needed, tolerance=slop)
+ or load_needed(v4_tr, trange_needed, tolerance=slop)):
+ do_slp = True
+
+ # Perform the needed updates
+ if do_slp:
+ pyspedas.themis.slp(trange=trange_needed)
+ if do_state:
+ pyspedas.themis.state(probe=probe, trange=trange_needed, get_support_data=True)
diff --git a/pyspedas/themis/state/slp.py b/pyspedas/themis/state/slp.py
index 27887f94..2c034703 100644
--- a/pyspedas/themis/state/slp.py
+++ b/pyspedas/themis/state/slp.py
@@ -1,5 +1,6 @@
-
+import logging
from pyspedas.themis.load import load
+from pytplot import set_coords
def slp(trange=['2007-03-23', '2007-03-24'],
@@ -59,8 +60,18 @@ def slp(trange=['2007-03-23', '2007-03-24'],
List of tplot variables created.
"""
- return load(instrument='slp', trange=trange, level=level,
+ retval = load(instrument='slp', trange=trange, level=level,
suffix=suffix, get_support_data=get_support_data,
varformat=varformat, varnames=varnames,
downloadonly=downloadonly, notplot=notplot,
time_clip=time_clip, no_update=no_update)
+
+ if not downloadonly:
+ # Coordinate system is not set in the data CDFs, so setting it here for now.
+ # Everything except for the light travel time variables is in GEI true-of-date.
+ for varname in retval:
+ if not "ltime" in varname:
+ logging.debug("Setting %s to GEI coordinates", varname)
+ set_coords(varname,"GEI")
+
+ return retval
diff --git a/pyspedas/themis/state/spinmodel/__init__.py b/pyspedas/themis/state/spinmodel/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/themis/state/spinmodel/spinmodel.py b/pyspedas/themis/state/spinmodel/spinmodel.py
new file mode 100644
index 00000000..54812326
--- /dev/null
+++ b/pyspedas/themis/state/spinmodel/spinmodel.py
@@ -0,0 +1,669 @@
+import numpy as np
+import math
+import logging
+from typing import Dict, Optional, Tuple
+from .spinmodel_segment import SpinmodelSegment
+from pytplot import get_data, store_data
+from pytplot import data_exists
+from pytplot import time_string
+
+
+def get_sm_data(probe: str,
+ valname: str,
+ correction_level: int) -> Optional[Tuple[np.ndarray, np.ndarray]]:
+ """ Return the times and values for a spin model tplot variable
+
+ Args:
+ probe: Probe name, one of 'a','b','c','d','e'
+ valname: The data quantity to be returned
+ correction_level: 0 for no corrections, 1 for waveform corrections, 2 for spin fit corrections
+
+ Returns:
+ A tuple containing the timestamps and data values as np.ndarray objects, or None if the variable does not exist
+ """
+ if correction_level == 0:
+ infix = '_'
+ else:
+ infix = '_ecl_'
+ tvar_name = 'th' + probe + '_spin' + infix + valname
+ if data_exists(tvar_name):
+ res = get_data(tvar_name)
+ return res.times, res.y
+ else:
+ return None
+
+
+class SpinmodelInterpTResult:
+ """ An object to return the results of interpolating a spin model.
+
+ Attributes:
+ spinphase (ndarray(dtype=float)): The spin phase (in degrees) at each input time
+ spincount (ndarray(dtype=int)): The count of complete spins from the start of the model, at each input time
+ spinper (ndarray(dtype=float)): The spin period (in seconds) at each input time
+ t_last (ndarray(dtype=float)): Time (seconds since epoch) of last sun sensor crossing before the input times
+ eclipse_delta_phi (ndarray(dtype=float)): The offset in degrees between the modeled spin phase and the onboard
+ spin phase during an eclipse
+ segflags (ndarray(dtype=int)): A set of bit flags denoting the eclipse and correction status at each input time
+ idx (ndarray(dtype=int)): The index of the spin model segment containing each input time
+ dt (ndarray(dtype=float)): The delta-t in seconds between the input times and start or end time of the
+ containing segment
+
+ """
+
+ __slots__ = 'spinphase', 'spincount', 'spinper', 't_last', 'eclipse_delta_phi', 'segflags', 'idx', 'dt'
+
+ def __init__(self, spinphase: np.ndarray,
+ spincount: np.ndarray,
+ spinper: np.ndarray,
+ t_last: np.ndarray,
+ eclipse_delta_phi: np.ndarray,
+ segflags: np.ndarray,
+ idx: np.ndarray,
+ dt: np.ndarray):
+ self.spinphase = spinphase
+ self.spincount = spincount
+ self.spinper = spinper
+ self.t_last = t_last
+ self.eclipse_delta_phi = eclipse_delta_phi
+ self.segflags = segflags
+ self.idx = idx
+ self.dt = dt
+
+
+class Spinmodel:
+ """ An object describing a spin model for a given probe, time interval, and correction level
+ A spinmodel is created from a set of tplot variables loaded from the THEMIS STATE CDFs. The time interval is
+ covered by a list of SpinmodelSegment objects, representing a piecewise quadratic fit of phi (spin plane angle)
+ versus time. The segment data from the CDF is loaded into the seg_list during initial creation of the model, with
+ some on-the-fly adjustments to account for the brief gaps that occur at UTC date boundaries. The final segment
+ list is converted to a series of ndarrays, stored in the seg_* attributes, to support efficient interpolation to
+ input times.
+
+ Attributes:
+ seg_list (List of SpinmodelSegment): A list of spin model segment objects comprising the model
+ lastseg (SpinmodelSegment): The most recently processed SpinmodelSegment
+ seg_times (ndarray): Array of t1 values from seg_list
+ seg_t2 (ndarray): Array of t2 values from seg_list
+ seg_c1 (ndarray): Array of c1 values from seg_list
+ seg_c2 (ndarray): Array of c2 values from seg_list
+ seg_b (ndarray): Array of b values from seg_list
+ seg_c (ndarray): Array of c values from seg_list
+ seg_npts (ndarray): Array of npts values from seg_list
+ seg_maxgap (ndarray): Array of maxgap values from seg_list
+ seg_phaserr (ndarray): Array of phaserr values from seg_list
+ seg_initial_delta_phi (ndarray): Array of initial_delta_phi values from seg_list
+ seg_idpu_spinper (ndarray): Array of idpu_spinper values from seg_list
+ seg_segflags (ndarray): Array of segflags values from seg_list
+ seg_count (int): Count of segments
+
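+ Example (sketch; assumes THEMIS state CDF variables for probe 'a' are already loaded
+ with get_support_data=True):
+
+ model = Spinmodel('a', 0)
+ start_t, end_t = model.get_timerange()
+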
+ """
+
+ def print_segs(self):
+ """ Print the list of SpinmodelSegment objects, useful for troubleshooting.
+
+ """
+
+ for seg in self.seg_list:
+ seg.print()
+
+ def adjust_delta_phi(self,
+ ecl_start: float,
+ ecl_end: float,
+ ecl_corr: float):
+ """ Apply the level 2 (spin fit) corrections to the segments falling in an eclipse time interval.
+
+ Args:
+ ecl_start (float): Start time (seconds since epoch) of an eclipse to be processed
+ ecl_end (float): End time (seconds since epoch) of an eclipse to be processed
+ ecl_corr (float): Eclipse delta-phi offset (degrees) between waveform and spin fit data for this eclipse
+ """
+
+ # A segment needs to be updated if its midpoint lies in the time range ecl_start to ecl_end.
+ # This is more robust against floating point roundoff errors than the previous method, which
+ # compared the segment endpoints rather than the midpoints.
+
+ seg_midpoints = (self.seg_times + self.seg_t2) / 2.0
+ cond1 = ecl_start <= seg_midpoints
+ cond2 = seg_midpoints <= ecl_end
+ cond = cond1 & cond2
+ idx = np.arange(len(self.seg_list))[cond]
+ self.seg_initial_delta_phi[idx] += ecl_corr
+ self.seg_segflags[idx] |= 4
+ # print("Eclipse start: ", ecl_start, "Eclipse end: ", ecl_end)
+ # print("First eclipse segment")
+ # self.seg_list[idx[0]].print()
+ # print("Last eclipse segment")
+ # idx_len = len(idx)
+ # self.seg_list[idx[idx_len - 1]].print()
+ for si in idx:
+ seg = self.seg_list[si]
+ seg.initial_delta_phi += ecl_corr
+ seg.segflags |= 4
+
+ def findseg_t(self,
+ t: np.ndarray) -> np.ndarray:
+ """ Return an ndarray of index values for the segments covering a set of input times
+
+ This is a helper routine for the interp_t method.
+
+ Args:
+ t (ndarray(dtype=float): Input times
+
+ Returns:
+ ndarray(dtype=int) of indices into the seg_* attributes for each input time
+ """
+
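+ # seg_times (segment start times) and seg_t2 (segment end times) are both sorted in
+ # ascending order, so np.searchsorted does a binary search of the input times against
+ # each array; for an input time inside segment i, searching seg_t2 yields index i.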
+ idx1 = np.searchsorted(self.seg_times, t)
+ idx2 = np.searchsorted(self.seg_t2, t)
+ diff = (idx2 - (idx1 + 1)).nonzero()
+ if len(diff) == len(t):
+ logging.warning("Indices don't match up:")
+ logging.warning(idx1)
+ logging.warning(idx2)
+ else:
+ # If any points are beyond the last segment t2, assign them to the last segment and extrapolate.
+ idx_max = self.seg_count - 1
+ idx_extrap = idx2 > idx_max
+ idx_adjusted = idx2
+ idx_adjusted[idx_extrap] = idx_max
+ return idx_adjusted
+
+ def interp_t(self,
+ t: np.ndarray,
+ use_spinphase_correction: bool = True) -> SpinmodelInterpTResult:
+ """ Interpolate the spin model to a set of input times, and return an object holding the results.
+
+ This is the workhorse routine for accessing the spin model. Clients will specify a set of input times (e.g.
+ from a tplot variable to be operated on), then obtain the spin status at those times, which can be used to
+ transform back and forth between SSL coordinates (spinning with the spacecraft) and DSL (despun, referenced to
+ sun direction and angular momentum vector/spin axis).
+
+ Rather than inefficiently iterating over the spinmodel segments and input times, the code performs vectorized
+ operations on Numpy arrays. This is done by calculating a set of segment indices corresponding to the input time
+ stamps, then replicating the segment data to match the number of input times. (So, we are trading extra memory
+ usage to allow vectorized bulk calculations) To minimize branching, several boolean conditions are evaluated,
+ yielding sets of array indices matching each set of conditions. The appropriate calculations are performed in
+ bulk for each set of condition indices. Finally, the various outputs are collected into a SpinmodelInterpTResult
+ object and returned.
+
+ Args:
+ t (ndarray(dtype=float): Input times
+ use_spinphase_correction (Boolean): Flag (defaults to True) specifying whether V03 state CDF corrections
+ should be applied to the interpolation output.
+
+ Returns:
+ SpinmodelInterpTResult object containing the interpolated outputs (spinphase, spincount, spin period,
+ eclipse corrections, etc)
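+
+ Example (sketch; 'tha_fgl_ssl' is a hypothetical variable supplying the input times):
+ times = get_data('tha_fgl_ssl').times
+ model = get_spinmodel(probe='a', correction_level=1)
+ res = model.interp_t(times)
+ phase_rad = res.spinphase * np.pi / 180.0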
+ """
+
+ segs_idx = self.findseg_t(t)
+ n = len(segs_idx)
+
+ my_seg_times = self.seg_times[segs_idx]
+ my_seg_t2 = self.seg_t2[segs_idx]
+ my_seg_b = self.seg_b[segs_idx]
+ my_seg_c = self.seg_c[segs_idx]
+ my_seg_c1 = self.seg_c1[segs_idx]
+ my_seg_c2 = self.seg_c2[segs_idx]
+ my_seg_initial_delta_phi = self.seg_initial_delta_phi[segs_idx]
+ my_seg_idpu_spinper = self.seg_idpu_spinper[segs_idx]
+ my_seg_segflags = self.seg_segflags[segs_idx]
+
+ # Below this point, all indexing assumes n = #times, not n = #model_segments. No references to self.anything.
+ all_idx = np.arange(n)
+ # output variables
+ spincount = np.zeros(n)
+ t_last = np.zeros(n)
+ spinphase = np.zeros(n)
+ spinper = np.zeros(n)
+ eclipse_delta_phi = np.zeros(n) # It is important that this variable is initialized to 0
+ mask = np.zeros(n) + 3
+
+ # Internal variables.
+ # If memory becomes an issue, we can avoid allocating by using some creative index operations
+ dt = np.zeros(n)
+ fracspins = np.zeros(n)
+ intspins = np.zeros(n, int)
+ bp = np.zeros(n)
+ phi_lastpulse = np.zeros(n)
+ tlast_dt = np.zeros(n)
+
+ idx1_cond = t < my_seg_times
+ idx1 = all_idx[idx1_cond]
+ c1 = len(idx1)
+
+ idx2_cond = t > my_seg_t2
+ idx2 = all_idx[idx2_cond]
+ c2 = len(idx2)
+
+ # each of the conditions below splits based on the truth of this subclause
+ branch1_cond = np.abs(my_seg_c) < 1.0e-12
+ branch1_idx = all_idx[branch1_cond]
+ branch1_cidx = all_idx[~branch1_cond]
+ branch1_c = len(branch1_idx)
+ branch1_nc = len(branch1_cidx)
+ tmp1 = np.bitwise_and(my_seg_segflags, 3)
+ tmp2 = np.equal(tmp1, mask)
+ tmp3 = np.greater(my_seg_idpu_spinper, 1.0)
+ branch2_cond = tmp2 & tmp3
+ branch2_idx = all_idx[branch2_cond]
+
+ idx3_cond = (t >= my_seg_times) & (t <= my_seg_t2)
+ idx3 = all_idx[idx3_cond]
+ c3 = len(idx3)
+
+ if c1 > 0:
+ dt[idx1] = my_seg_times[idx1] - t[idx1]
+ spinper[idx1] = 360.0 / my_seg_b[idx1]
+ fracspins[idx1] = dt[idx1] / spinper[idx1]
+ intspins[idx1] = np.ceil(fracspins[idx1])
+ spinphase[idx1] = (intspins[idx1] - fracspins[idx1]) * 360.0
+ spincount[idx1] = my_seg_c1[idx1] - intspins[idx1]
+ t_last[idx1] = my_seg_times[idx1] - intspins[idx1] * spinper[idx1]
+
+ if c2 > 0:
+ dt[idx2] = t[idx2] - my_seg_t2[idx2]
+ bp[idx2] = my_seg_b[idx2] + 2.0 * my_seg_c[idx2] * (my_seg_t2[idx2] - my_seg_times[idx2])
+ spinper[idx2] = 360.0 / bp[idx2]
+ fracspins[idx2] = dt[idx2] / spinper[idx2]
+
+ idx2_branch_cond = idx2_cond & branch2_cond
+ idx2_branch = all_idx[idx2_branch_cond]
+ if len(idx2_branch) > 0:
+ model_phi = fracspins[idx2_branch] * 360.0
+ idpu_bp = 360.0 / my_seg_idpu_spinper[idx2_branch]
+ idpu_phi = dt[idx2_branch] * idpu_bp
+ eclipse_delta_phi[idx2_branch] = my_seg_initial_delta_phi[idx2_branch] + (model_phi - idpu_phi)
+ intspins[idx2] = np.floor(fracspins[idx2])
+ spinphase[idx2] = (fracspins[idx2] - intspins[idx2]) * 360.0
+ spincount[idx2] = my_seg_c2[idx2] + intspins[idx2]
+ t_last[idx2] = my_seg_t2[idx2] + intspins[idx2] * spinper[idx2]
+
+ if c3 > 0:
+ dt[idx3] = t[idx3] - my_seg_times[idx3]
+ phi = my_seg_b[idx3] * dt[idx3] + my_seg_c[idx3] * dt[idx3] * dt[idx3]
+ bp[idx3] = my_seg_b[idx3] + 2.0 * my_seg_c[idx3] * dt[idx3]
+ spinper[idx3] = 360.0 / bp[idx3]
+ spinphase[idx3] = np.fmod(phi, 360.0)
+ fracspins[idx3] = phi / 360.0
+ spincount[idx3] = np.floor(fracspins[idx3])
+ phi_lastpulse[idx3] = spincount[idx3] * 360.0
+
+ if branch1_c > 0:
+ tlast_dt[branch1_idx] = phi_lastpulse[branch1_idx] / my_seg_b[branch1_idx]
+
+ if branch1_nc > 0:
+ tlast_dt[branch1_cidx] = (-my_seg_b[branch1_cidx] +
+ np.sqrt(my_seg_b[branch1_cidx] ** 2 - 4.0 * my_seg_c[branch1_cidx] * (
+ -phi_lastpulse[branch1_cidx]))) / (2.0 * my_seg_c[branch1_cidx])
+
+ idx3_branch_cond = idx3_cond & branch2_cond
+ idx3_branch = all_idx[idx3_branch_cond]
+
+ if len(idx3_branch) > 0:
+ model_phi = fracspins[idx3_branch] * 360.0
+ idpu_bp = 360.0 / my_seg_idpu_spinper[idx3_branch]
+ idpu_phi = dt[idx3_branch] * idpu_bp
+ eclipse_delta_phi[idx3_branch] = my_seg_initial_delta_phi[idx3_branch] + (model_phi - idpu_phi)
+ t_last[idx3] = my_seg_times[idx3] + tlast_dt[idx3]
+ spincount[idx3] = spincount[idx3] + my_seg_c1[idx3]
+
+ if use_spinphase_correction:
+ logging.info("applying spinphase correction")
+ interp_correction = np.interp(t, self.spin_corr_times, self.spin_corr_vals)
+ spinphase -= interp_correction
+ cond = spinphase > 360.0
+ spinphase[cond] -= 360.0
+ cond = spinphase < 0.0
+ spinphase[cond] += 360.0
+
+ res = SpinmodelInterpTResult(spincount=spincount, spinphase=spinphase, t_last=t_last,
+ eclipse_delta_phi=eclipse_delta_phi, spinper=spinper, segflags=my_seg_segflags,
+ idx=segs_idx, dt=dt)
+ return res
+
+ def make_arrays(self):
+ """ Populate the seg_* attributes using data from the segment list
+
+ For compatibility with the IDL implementation, we use the same algorithm in Python to build up a segment
+ list from the STATE CDF variables. In IDL, we can do vectorized access to class members through an array
+ of indices, but this doesn't work the same way in Python. Here, it is most convenient to convert the segment
+ list to a set of Numpy arrays, which lets us do vectorized calculations on them.
+
+ Args:
+
+ Returns:
+ """
+
+ self.seg_count = len(self.seg_list)
+ self.seg_times = np.array([o.t1 for o in self.seg_list])
+ self.seg_t2 = np.array([o.t2 for o in self.seg_list])
+ self.seg_c1 = np.array([o.c1 for o in self.seg_list])
+ self.seg_c2 = np.array([o.c2 for o in self.seg_list])
+ self.seg_b = np.array([o.b for o in self.seg_list])
+ self.seg_c = np.array([o.c for o in self.seg_list])
+ self.seg_npts = np.array([o.npts for o in self.seg_list])
+ self.seg_maxgap = np.array([o.maxgap for o in self.seg_list])
+ self.seg_phaserr = np.array([o.phaserr for o in self.seg_list])
+ self.seg_idpu_spinper = np.array([o.idpu_spinper for o in self.seg_list])
+ self.seg_initial_delta_phi = np.array([o.initial_delta_phi for o in self.seg_list])
+ self.seg_segflags = np.array([o.segflags for o in self.seg_list])
+
+ def make_tplot_vars(self,
+ prefix: str):
+ """ Create a set of tplot variables from the spinmodel segment attributes.
+
+ This is useful for regression testing or cross-platform validation of the spin model creation process.
+
+ :param prefix : A string to prepend to each tplot variable name to ensure uniqueness
+ :return:
+ """
+
+ store_data(prefix + 't1', data={'x': self.seg_times, 'y': self.seg_times})
+ store_data(prefix + 't2', data={'x': self.seg_times, 'y': self.seg_t2})
+ store_data(prefix + 'c1', data={'x': self.seg_times, 'y': self.seg_c1})
+ store_data(prefix + 'c2', data={'x': self.seg_times, 'y': self.seg_c2})
+ store_data(prefix + 'b', data={'x': self.seg_times, 'y': self.seg_b})
+ store_data(prefix + 'c', data={'x': self.seg_times, 'y': self.seg_c})
+ store_data(prefix + 'npts', data={'x': self.seg_times, 'y': self.seg_npts})
+ store_data(prefix + 'maxgap', data={'x': self.seg_times, 'y': self.seg_maxgap})
+ store_data(prefix + 'phaserr', data={'x': self.seg_times, 'y': self.seg_phaserr})
+ store_data(prefix + 'idpu_spinper', data={'x': self.seg_times, 'y': self.seg_idpu_spinper})
+ store_data(prefix + 'initial_delta_phi', data={'x': self.seg_times, 'y': self.seg_initial_delta_phi})
+ store_data(prefix + 'segflags', data={'x': self.seg_times, 'y': self.seg_segflags})
+
+ def addseg(self,
+ newseg: SpinmodelSegment):
+ """ Add a segment to the spin model object being constructed.
+
+ A spin model is assumed to satisfy a condition that the end time of one segment exactly matches the start time
+ of the next segment, with no gaps or overlaps.
+
+ When loading STATE data for time intervals spanning multiple UTC days, there will be small time gaps
+ between the spin model segments at either side of UTC date boundaries. This routine adjusts the segment
+ list by inserting new 'bridge' segments, as needed, to bridge gaps between the segments read from the CDF data,
+ also making any necessary adjustments to the preceding/following segments.
+
+ :param newseg: A SpinmodelSegment object to be added to the spin model being constructed
+ :return:
+ """
+
+ if self.seg_count == 0:
+ self.seg_list.append(newseg)
+ self.lastseg = 0
+ self.seg_count = 1
+ else:
+ lseg = self.seg_list[self.lastseg]
+ # print('Adding segment')
+ # print('Last:')
+ # lseg.print()
+ # print('Current')
+ # newseg.print()
+ # Previously, this was an exact equality test. Now, the DEPEND_TIME variables are handled slightly
+ # differently than plain old double-precision variables, so the possibility of small floating point
+ # differences needs to be accounted for. For the purposes of spin model segments, if the times are within
+ # a microsecond, they might as well be equal.
+ tdiff = abs(newseg.t1 - lseg.t2)
+ tolerance = 1.0e-06
+ if tdiff < tolerance:
+
+ #
+ # Normal case: segments are contiguous
+ #
+ newseg.c1 = lseg.c2
+ newseg.c2 = lseg.c2 + newseg.c2
+ self.seg_list.append(newseg)
+ self.lastseg += 1
+ self.seg_count += 1
+ else:
+ # Segments are not contiguous -- this should indicate
+ # a UTC date boundary, and the spin models on either side will
+ # need to be merged.
+ #
+ # There are several cases, depending on the delta-t between the
+ # end of the previous segment, and the start of the current segment:
+ #
+ # 1) Large gap, greater than 1/2 spin : create a new segment to
+ # bridge the gap.
+ # 2) Small gap, <= 1/2 spin, previous segment covers 2 or more spins:
+ # remove last spin from previous segment, converting the situation
+ # to the "large gap" case, then create a new segment to bridge
+ # the gap.
+ # 3) Small gap, previous segment only contains 1 spin : if current
+ # segment contains 2 or more spins, remove first spin from
+ # current segment, converting the situation to the "large gap"
+ # case, then create a new segment to bridge the gap.
+ # 4) Small gap, previous and current segments each contain only
+ # a single spin. This should never happen -- if no averaging
+ # was applied, the segments should be exactly contiguous.
+ # 5) Negative gap -- current segment starts more than 1/2 spin
+ # before end of previous segment. This should never happen,
+ # since it would imply that the apid 305 packets are incorrectly
+ # time ordered.
+
+ spinper = 360.0 / lseg.b
+ gap_spin_count = (newseg.t1 - lseg.t2) / spinper
+ gap_time = newseg.t1 - lseg.t2
+ if gap_spin_count > 0.5:
+ # Case 1: Gap of 1 or more spins between segments, add fill
+ gap_nspins = math.floor(gap_spin_count + 0.5)
+ gap_spinper = (newseg.t1 - lseg.t2) / (1.0 * gap_nspins)
+ # Fill in eclipse delta_phi parameters
+ gap_idpu_spinper = (newseg.idpu_spinper + lseg.idpu_spinper) / 2.0
+ gap_segflags = newseg.segflags & lseg.segflags
+ # We need to calculate gap_initial_delta_phi by extrapolating
+ # from lseg to lseg.t2 = gapseg.t1
+ spinper, spinphase, dummy_spincount, gap_eclipse_delta_phi, dummy_t_last = \
+ lseg.interp_t(lseg.t2)
+ fillseg = SpinmodelSegment(t1=lseg.t2, t2=newseg.t1, c1=lseg.c2, c2=lseg.c2 + gap_nspins,
+ b=360.0 / gap_spinper, c=0.0, npts=0, maxgap=gap_time, phaserr=0.0,
+ initial_delta_phi=gap_eclipse_delta_phi,
+ idpu_spinper=gap_idpu_spinper, segflags=gap_segflags)
+ self.seg_list.append(fillseg)
+ self.lastseg = self.lastseg + 1
+ self.seg_count += 1
+ newseg.c1 = fillseg.c2
+ newseg.c2 = newseg.c1 + newseg.c2
+ self.seg_list.append(newseg)
+ self.lastseg = self.lastseg + 1
+ self.seg_count += 1
+ elif gap_spin_count > -0.5:
+ # Case 2, 3, 4, or 5
+ # Now that we're using floating point time comparisons with tolerance, rather than strict
+ # equality, none of these cases seem to occur anymore. Cases 2 and (possibly) 3 have apparently
+ # been absorbed into the "segments are contiguous (within tolerance)" case. Cases 4 and 5 were
+ # "this should never happen" scenarios. It would be somewhat difficult to contrive plausible test
+ # inputs that would trigger cases 2 or 3 in this iteration of the code, so (for the sake of our test
+ # coverage metrics), I will replace those code blocks with exceptions. If they ever do turn up
+ # in practice, we can revive them from the version history or from the IDL code. -- JWL 2023/03/13
+
+ if (lseg.c2 - lseg.c1) >= 2:
+ # Case 2: small gap, previous segment has at least 2 spins
+ # dprint,'<1 spin gap, stealing spin from last segment'
+ logging.error('Unexpected case 2 (small gap, previous segment with at least 2 spins)')
+ logging.error('Segment time: ' + time_string(lseg.t2))
+ logging.error('Please contact pyspedas or themis support and include the above information.')
+ raise RuntimeError
+ elif newseg.c2 >= 2:
+ # Case 3: small gap, previous segment has only 1 spin, current segment has at least 2 spins
+ # It is assumed that newseg is the first segment of a new UTC day, therefore the spin numbers
+ # start over at 0. So we want to change newseg to start at spin 1 instead of spin 0.
+ logging.error('Unexpected case 3 (small gap, previous segment with only 1 spin, current segment with 2+ spins.)')
+ logging.error('Segment time: ' + time_string(lseg.t2))
+ logging.error('Please contact pyspedas or themis support and include the above information.')
+ raise RuntimeError
+ else:
+ # Case 4: small gap, but segments on either side only contain
+ # one spin each. This should never happen.
+ logging.error('Unexpected case 4 (<1 spin gap, but neither segment has enough spins to steal.)')
+ logging.error('Segment time: ' + time_string(lseg.t2))
+ logging.error('Please contact pyspedas or themis support and include the above information.')
+ raise RuntimeError
+ else:
+ # Case 5: out of order sun pulse times. This should never happen.
+ logging.error('Unexpected case 5 (Sun pulse times out of order)')
+ logging.error("Last segment end time" + time_string(lseg.t2) + " New segment start time " + time_string(newseg.t1))
+ raise RuntimeError
+
+ def get_timerange(self):
+ """ Returns the time span covered by the model.
+ The IDL version also returns information about any eclipse time periods, will add later
+ if needed.
+
+ Args: None
+
+ Returns: tuple with start time and end time
+ """
+ start_time = self.seg_times[0]
+ end_time = self.seg_t2[-1]
+ return start_time, end_time
+
+ def get_eclipse_times(self, min_shadow_duration: float = 60.0):
+ """ Return lists of start times and end times for eclipses found in the spin model.
+
+ Args:
+ min_shadow_duration (float): Minimum shadow duration in seconds (accepted for
+ compatibility, not currently used by this implementation)
+
+ Returns: A tuple containing two lists, one for start times and one for end times. Returns empty
+ lists if no eclipses are found.
+ """
+ start_times = []
+ end_times = []
+ processing_shadow = False
+
+ for i in range(self.seg_count):
+ this_eclipse_flag = self.seg_segflags[i] & 1
+ if not this_eclipse_flag and not processing_shadow:
+ # Previous and current segments are not eclipses, do nothing
+ pass
+ elif not this_eclipse_flag and processing_shadow:
+ # Transition out of shadow, reset status
+ processing_shadow = False
+ elif this_eclipse_flag and not processing_shadow:
+ # Transition into shadow, add entries to start and end time lists, set status
+ start_times.append(self.seg_times[i])
+ end_times.append(self.seg_t2[i])
+ processing_shadow = True
+ else:
+ # Previous and current segments in shadow, update last end time
+ end_times[-1] = self.seg_t2[i]
+
+ return start_times, end_times
+
+ def __init__(self,
+ probe,
+ correction_level):
+ self.lastseg = SpinmodelSegment(t1=0.0, t2=0.0, c1=0, c2=0, b=0.0, c=0.0, npts=0, maxgap=0.0, phaserr=0.0,
+ initial_delta_phi=0.0, idpu_spinper=0.0, segflags=0)
+ self.seg_times = np.zeros(1, float)
+ self.seg_t2 = np.zeros(1, float)
+ self.seg_c1 = np.zeros(1, int)
+ self.seg_c2 = np.zeros(1, int)
+ self.seg_b = np.zeros(1, float)
+ self.seg_c = np.zeros(1, float)
+ self.seg_npts = np.zeros(1, int)
+ self.seg_maxgap = np.zeros(1, float)
+ self.seg_phaserr = np.zeros(1, float)
+ self.seg_initial_delta_phi = np.zeros(1, float)
+ self.seg_idpu_spinper = np.zeros(1, float)
+ self.seg_segflags = np.zeros(1, int)
+ self.seg_count = 0
+ self.seg_list = []
+ seg_times, tend_data = get_sm_data(probe, 'tend', correction_level)
+ t, spinper_data = get_sm_data(probe, 'spinper', correction_level)
+ t, c_data = get_sm_data(probe, 'c', correction_level)
+ t, phaserr_data = get_sm_data(probe, 'phaserr', correction_level)
+ t, nspins_data = get_sm_data(probe, 'nspins', correction_level)
+ t, npts_data = get_sm_data(probe, 'npts', correction_level)
+ t, maxgap_data = get_sm_data(probe, 'maxgap', correction_level)
+ t, initial_delta_phi_data = get_sm_data(probe, 'initial_delta_phi', correction_level)
+ t, idpu_spinper_data = get_sm_data(probe, 'idpu_spinper', correction_level)
+ t, segflags_data = get_sm_data(probe, 'segflags', correction_level)
+ # The spin_correction variable only exists in V03 state CDFs, and has its own time variable
+ tmp = get_sm_data(probe, 'correction', 0)
+ if tmp is None:
+ logging.info('spin_correction variable not available, defaulting to 0.0')
+ self.spin_corr_times = [0.0, 1.0]
+ self.spin_corr_vals = [0.0, 0.0]
+ else:
+ self.spin_corr_times, self.spin_corr_vals = tmp
+
+ # The fgm_corr_offset and fgm_corr_tend variables may not exist, and have their own time variable
+ tmp = get_sm_data(probe, 'fgm_corr_offset', correction_level)
+ if tmp is None:
+ do_fgm_corr = False
+ logging.info('FGM correction variables not available')
+ else:
+ do_fgm_corr = True
+ fgm_corr_time, fgm_corr_offset = tmp
+ t, fgm_corr_tend = get_sm_data(probe, 'fgm_corr_tend', correction_level)
+
+ seg_count = len(seg_times)
+ # tlast = seg_times[0]
+ for i in range(seg_count):
+ newseg = SpinmodelSegment(t1=seg_times[i], t2=tend_data[i], c1=0, c2=nspins_data[i],
+ b=360.0 / spinper_data[i],
+ c=c_data[i], npts=npts_data[i], maxgap=maxgap_data[i], phaserr=phaserr_data[i],
+ initial_delta_phi=initial_delta_phi_data[i], idpu_spinper=idpu_spinper_data[i],
+ segflags=segflags_data[i])
+ # tlast=tend_data[i]
+ # origseg=copy.copy(newseg)
+ self.addseg(newseg)
+ # print(i)
+ # newseg.print()
+ # lastseg=copy.copy(origseg)
+ self.make_arrays()
+ if do_fgm_corr and (correction_level == 2):
+ logging.info(f"applying FGM corrections, do_fgm_corr = {do_fgm_corr}, correction_level = {correction_level}")
+ for i in np.arange(len(fgm_corr_offset)):
+ self.adjust_delta_phi(fgm_corr_time[i], fgm_corr_tend[i], fgm_corr_offset[i])
+ else:
+ logging.info(f"Skipping FGM corrections, do_fgm_corr {do_fgm_corr}, correction_level = {correction_level}")
+
+
+# This dictionary is where the spinmodel objects are stored. The keys are tuples of (probe, correction_level)
+# and the values are Spinmodel objects. Spinmodel objects are added to the dictionary by the
+# spinmodel_postprocess routine.
+
+spinmodel_dict: Dict[Tuple[str, int], Spinmodel] = {}
+
+
+def get_spinmodel(probe: str,
+ correction_level: int,
+ quiet: bool = False) -> Spinmodel:
+ """ Get a reference to a Spinmodel object stored in the dictionary.
+
+ Args:
+ probe: Probe name, one of 'a','b','c','d','e'
+ correction_level: 0 for no corrections, 1 for waveform corrections, 2 for spin fit corrections
+ quiet: If True, do not log anything if the model is uninitialized
+
+ Returns:
+ A reference to a Spinmodel object stored in the dictionary.
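+
+ Example (assumes state data for probe 'a' was loaded with get_support_data=True):
+ model = get_spinmodel('a', correction_level=1)
+ if model is not None:
+ print(model.get_timerange())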
+ """
+ try:
+ model = spinmodel_dict[(probe, correction_level)]
+ except KeyError:
+ if not quiet:
+ logging.warning("No spinmodel loaded for probe " + probe + " correction level: " + str(correction_level))
+ logging.warning("It is necessary to load THEMIS state data, with get_support_data=True, to initialize the spin model.")
+ model = None
+ return model
+
+
+def save_spinmodel(probe: str,
+ correction_level: int,
+ model: Spinmodel):
+ """ Store a reference to a Spinmodel object in the dictionary, using the probe and correction level as the key
+
+ Args:
+ probe: Probe name, one of 'a','b','c','d','e'
+ correction_level: 0 for no corrections, 1 for waveform corrections, 2 for spin fit corrections
+ model: A Spinmodel object to store
+
+ """
+ spinmodel_dict[(probe, correction_level)] = model
diff --git a/pyspedas/themis/state/spinmodel/spinmodel_postprocess.py b/pyspedas/themis/state/spinmodel/spinmodel_postprocess.py
new file mode 100644
index 00000000..37498cd0
--- /dev/null
+++ b/pyspedas/themis/state/spinmodel/spinmodel_postprocess.py
@@ -0,0 +1,46 @@
+import logging
+from .spinmodel import Spinmodel, save_spinmodel
+from pytplot import data_exists
+
+
+def spinmodel_postprocess(probe: str):
+ """ Create and initialize three Spinmodel objects using tplot variables loaded from the STATE CDFs.
+
+ The three models correspond to the three available correction levels: 0 = no corrections, 1 = waveform
+ corrections, 2 = spin fit corrections.
+
+ Each of the three models is stored in a dictionary via the save_spinmodel routine.
+
+ Args:
+ probe (str): A single letter string specifying the probe for the models being built.
+
+ """
+
+ # It is possible that /get_support_data was specified, but only a limited set of variables
+ # was requested. Check that all needed spin model variables are present before attempting
+ # to create the models.
+
+ sm_quantities = ['tend', 'spinper', 'c', 'phaserr', 'nspins', 'npts', 'maxgap', 'initial_delta_phi', 'idpu_spinper',
+ 'segflags']
+ missing_var = False
+ for v in sm_quantities:
+ non_ecl_v = 'th' + probe + '_spin_' + v
+ ecl_v = 'th' + probe + '_spin_ecl_' + v
+ if not (data_exists(non_ecl_v) and data_exists(ecl_v)):
+ missing_var = True
+
+ if missing_var:
+ logging.warning("Some required spin model variables were not requested, skipping spin model creation")
+ return
+
+ logging.info("Creating spin model for probe " + probe + " correction level 0")
+ # Expect a warning message here ("That name is currently not in tplot") if the optional
+ # spin_correction variable is absent from the loaded data. Maybe there's a better idiom in
+ # pytplot for checking whether a variable exists?
+ model0 = Spinmodel(probe, 0)
+ save_spinmodel(probe, 0, model0)
+ logging.info("Creating spin model for probe " + probe + " correction level 1")
+ model1 = Spinmodel(probe, 1)
+ save_spinmodel(probe, 1, model1)
+ logging.info("Creating spin model for probe " + probe + " correction level 2")
+ model2 = Spinmodel(probe, 2)
+ save_spinmodel(probe, 2, model2)
diff --git a/pyspedas/themis/state/spinmodel/spinmodel_segment.py b/pyspedas/themis/state/spinmodel/spinmodel_segment.py
new file mode 100644
index 00000000..9d853495
--- /dev/null
+++ b/pyspedas/themis/state/spinmodel/spinmodel_segment.py
@@ -0,0 +1,118 @@
+import math
+
+
+class SpinmodelSegment:
+ """ An object describing a single segment of a spin model.
+
+ Attributes:
+ t1 (float): Start time of segment (seconds since epoch)
+ t2 (float): End time of segment (seconds since epoch)
+ c1 (int): Spin count at start of segment
+ c2 (int): Spin count at end of segment
+ b (float): Initial spin rate in degrees/sec
+ c (float): Acceleration of spin rate in degrees/sec^2
+ npts (int): Number of data points used to construct this segment
+ maxgap (float): Largest gap, in seconds, in data used to construct this segment
+ phaserr (float): The maximum fitting error in this segment (seconds)
+ initial_delta_phi (float): The offset in degrees between the corrected spin model and uncorrected IDPU model at
+ the start of the segment
+ idpu_spinper (float): The onboard spin period during an eclipse segment
+ segflags (int): Bit mapped flags describing the eclipse and correction status during this segment
+ Bit 0 (lsb) eclipse flag, bit 1 waveform corrections applied, bit 2 spin fit corrections applied
+ """
+
+ __slots__ = 't1', 't2', 'c1', 'c2', 'b', 'c', 'npts', 'maxgap', 'phaserr', 'initial_delta_phi', 'idpu_spinper', \
+ 'segflags'
+
+ def __init__(self,
+ t1: float,
+ t2: float,
+ c1: int,
+ c2: int,
+ b: float,
+ c: float,
+ npts: int,
+ maxgap: float,
+ phaserr: float,
+ initial_delta_phi: float,
+ idpu_spinper: float,
+ segflags: int):
+ self.t1 = t1
+ self.t2 = t2
+ self.c1 = c1
+ self.c2 = c2
+ self.b = b
+ self.c = c
+ self.npts = npts
+ self.maxgap = maxgap
+ self.phaserr = phaserr
+ self.initial_delta_phi = initial_delta_phi
+ self.idpu_spinper = idpu_spinper
+ self.segflags = segflags
+
+ def print(self):
+ """ Print a segment using sensible formatting
+
+ Args:
+
+ Returns:
+ """
+
+ print('%20.8f %20.8f %d %d %f %e %d %f %f %f %f %d' % (self.t1, self.t2, self.c1, self.c2,
+ self.b, self.c, self.npts, self.maxgap, self.phaserr,
+ self.initial_delta_phi,
+ self.idpu_spinper, self.segflags))
+
+# Previously, this module included a set of interpolation and extrapolation routines for
+# single spinmodel segments:
+#
+# extrap_before_t()
+# extrap_after_t()
+# extrap_before_n()
+# extrap_after_n()
+#
+# These routines have been inlined where needed; users should call the interp_t or interp_n methods on
+# the spinmodel object. If needed, the removed routines can be restored from the version history or the IDL code.
+# JWL 2023-03-13
+#
+
+ def interp_t(self,
+ t: float) -> (float, float, int, float, float):
+ """Return modeled values for a time falling within the segment start/end times
+
+ Args:
+ t (float): Input time (seconds since epoch)
+
+ Returns:
+ A tuple containing the modeled spin period, spin phase, spin count, eclipse_delta_phi, and t_last
+ at the input time.
+
+ """
+
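+ # Quadratic phase model for the segment: phi(dt) = b*dt + c*dt**2 (degrees);
+ # the instantaneous spin rate is bp = phi'(dt) = b + 2*c*dt (degrees/sec).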
+ dt = t - self.t1
+ phi = (self.b * dt + self.c * dt * dt)
+ bp = self.b + 2.0 * self.c * dt
+ spinper = 360.0 / bp
+ spincount, spinphase = divmod(phi, 360.0)
+ fracspins = phi / 360.0
+ phi_lastpulse = spincount * 360.0
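+ # Invert the phase model to find the time of the most recent sun pulse:
+ # solve c*dt**2 + b*dt - phi_lastpulse = 0 for dt, falling back to the
+ # linear solution when the acceleration term c is negligible.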
+ if abs(self.c) < 1.0e-12:
+ tlast_dt = phi_lastpulse / self.b
+ else:
+ b = self.b
+ c = self.c
+ tlast_dt = (-b + math.sqrt(b * b - 4.0 * c * (-phi_lastpulse))) / (2.0 * c)
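+ # During an eclipse segment with waveform corrections applied (segflags bits 0 and 1
+ # both set) and a plausible onboard spin period, accumulate the offset between the
+ # corrected model phase and the onboard IDPU phase.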
+ if ((self.segflags & 3) == 3) and (self.idpu_spinper > 1.0):
+ model_phi = fracspins * 360.0
+ idpu_bp = 360.0 / self.idpu_spinper
+ idpu_phi = dt * idpu_bp
+ eclipse_delta_phi = self.initial_delta_phi + (model_phi - idpu_phi)
+
+ else:
+ eclipse_delta_phi = 0.0
+ t_last = self.t1 + tlast_dt
+ spincount = spincount + self.c1
+ return spinper, spinphase, spincount, eclipse_delta_phi, t_last
+
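+# Illustrative sketch (not part of the class): decoding the segflags bits documented above.
+# interp_t() applies eclipse delta-phi corrections only when both low bits are set:
+#
+#   in_eclipse = bool(seg.segflags & 1)          # bit 0: eclipse flag
+#   waveform_corrected = bool(seg.segflags & 2)  # bit 1: waveform corrections applied
+#   spinfit_corrected = bool(seg.segflags & 4)   # bit 2: spin fit corrections applied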
+
+
diff --git a/pyspedas/themis/state/state.py b/pyspedas/themis/state/state.py
index 6e38a001..a7a0f730 100644
--- a/pyspedas/themis/state/state.py
+++ b/pyspedas/themis/state/state.py
@@ -1,5 +1,7 @@
-
from pyspedas.themis.load import load
+from pytplot import data_exists, del_data
+from .apply_spinaxis_corrections import apply_spinaxis_corrections
+from .spinmodel.spinmodel_postprocess import spinmodel_postprocess
def state(trange=['2007-03-23', '2007-03-24'],
@@ -12,14 +14,15 @@ def state(trange=['2007-03-23', '2007-03-24'],
downloadonly=False,
notplot=False,
no_update=False,
- time_clip=False):
+ time_clip=False,
+ keep_spin=False):
"""
This function loads THEMIS state data
Parameters:
trange: list of str
time range of interest [starttime, endtime] with the format
- 'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
+ ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe: str or list of str
@@ -59,12 +62,46 @@ def state(trange=['2007-03-23', '2007-03-24'],
Time clip the variables to exactly the range specified
in the trange keyword
+ keep_spin: bool
+ If True, do not delete the spin model tplot variables after the spin models are built.
+
Returns:
List of tplot variables created.
"""
- return load(instrument='state', trange=trange, level=level, probe=probe,
- suffix=suffix, get_support_data=get_support_data,
- varformat=varformat, varnames=varnames,
- downloadonly=downloadonly, notplot=notplot,
- time_clip=time_clip, no_update=no_update)
+ # If support data is being loaded, preemptively delete the thx_spinras_correction and thx_spindec_correction
+ # variables, to avoid dangling corrections if they don't exist in this time interval.
+ if get_support_data:
+ for p in probe:
+ spinras_corrvar = 'th' + p + '_spinras_correction'
+ spindec_corrvar = 'th' + p + '_spindec_correction'
+ if data_exists(spinras_corrvar):
+ del_data(spinras_corrvar)
+ if data_exists(spindec_corrvar):
+ del_data(spindec_corrvar)
+
+ res = load(instrument='state', trange=trange, level=level, probe=probe,
+ suffix=suffix, get_support_data=get_support_data,
+ varformat=varformat, varnames=varnames,
+ downloadonly=downloadonly, notplot=notplot,
+ time_clip=time_clip, no_update=no_update)
+ if get_support_data:
+ for p in probe:
+ # Process spin model variables
+ spinmodel_postprocess(p)
+ if not keep_spin:
+ spinvar_pattern = 'th' + p + '_spin_*'
+ del_data(spinvar_pattern)
+ # Perform spin axis RA and Dec corrections
+ spinras_var = 'th' + p + '_spinras'
+ delta_spinras_var = 'th' + p + '_spinras_correction'
+ corrected_spinras_var = 'th' + p + '_spinras_corrected'
+
+ spindec_var = 'th' + p + '_spindec'
+ delta_spindec_var = 'th' + p + '_spindec_correction'
+ corrected_spindec_var = 'th' + p + '_spindec_corrected'
+
+ apply_spinaxis_corrections(spinras=spinras_var, delta_spinras=delta_spinras_var,
+ corrected_spinras=corrected_spinras_var, spindec=spindec_var,
+ delta_spindec=delta_spindec_var, corrected_spindec=corrected_spindec_var)
+ return res
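+
+# Example usage (illustrative): loading state data with get_support_data=True builds the
+# spin models and applies the spin axis corrections as a post-processing step:
+#
+#   from pyspedas.themis import state, get_spinmodel
+#   state(trange=['2007-03-23', '2007-03-24'], probe='a', get_support_data=True)
+#   model = get_spinmodel('a', correction_level=2)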
diff --git a/pyspedas/themis/tests/autoload_support_tests.py b/pyspedas/themis/tests/autoload_support_tests.py
new file mode 100644
index 00000000..5a325301
--- /dev/null
+++ b/pyspedas/themis/tests/autoload_support_tests.py
@@ -0,0 +1,169 @@
+"""Test gmag and themis load functions."""
+import os
+import unittest
+from pytplot import data_exists, time_string, time_double, del_data, get_timespan
+from pyspedas.themis import autoload_support, get_spinmodel, fit
+from pyspedas.themis.state.autoload_support import load_needed
+
+
+class AutoLoadTestCases(unittest.TestCase):
+ """Test themis support data autoload functions."""
+
+ def test_load_needed_exact(self):
+ # Test logic for determining whether support data needs to be loaded, based on exact comparisons
+ # Timespan 1 completely encloses timespan 2
+ ts1=['2007-03-23 00:00:00', '2007-03-24 00:00:00']
+ ts2=['2007-03-23 00:00:01', '2007-03-23 23:59:59']
+
+ tr1=time_double(ts1)
+ tr2=time_double(ts2)
+ trange_loaded=tr1
+ trange_needed=tr2
+ # trange loaded encloses trange needed on both sides, no load needed
+ self.assertFalse(load_needed(trange_loaded,trange_needed,tolerance=0.0))
+ trange_loaded=tr2
+ trange_needed=tr1
+ # trange loaded is a proper subset of trange needed, load needed
+ self.assertTrue(load_needed(trange_loaded,trange_needed,tolerance=0.0))
+ # tr3 starts before tr1
+ tr3=[tr2[0] - 2.0, tr2[1]]
+ trange_loaded=tr1
+ trange_needed=tr3
+ # trange needed starts before trange loaded, load needed
+ self.assertTrue(load_needed(trange_loaded,trange_needed,tolerance=0.0))
+ # tr4 ends after tr1
+ tr4=[tr2[0], tr2[1]+2.0]
+ trange_loaded=tr1
+ trange_needed=tr4
+ # trange needed ends after trange loaded, load needed
+ self.assertTrue(load_needed(trange_loaded,trange_needed,tolerance=0.0))
+
+ def test_load_needed_tolerance(self):
+ # Test logic for determining whether support data needs to be loaded, while
+ # allowing a certain amount of extrapolation
+ tolerance = 10.0
+ ts1=['2007-03-23 00:00:00', '2007-03-24 00:00:00']
+ tr1 = time_double(ts1)
+ trange_needed=tr1
+ ts2=['2007-03-23 00:00:09', '2007-03-23 23:59:51']
+ tr2=time_double(ts2)
+ trange_loaded=tr2
+ # Loaded data fails to overlap needed data by < tolerance on both sides, load not needed
+ self.assertFalse(load_needed(trange_loaded,trange_needed,tolerance=tolerance))
+ trange_loaded=tr1
+ trange_needed=tr2
+ # Loaded data overlaps needed data on both ends, load not needed
+ self.assertFalse(load_needed(trange_loaded,trange_needed,tolerance=tolerance))
+ trange_loaded=tr1
+ trange_needed=[tr1[0] - 9.0, tr1[1]]
+ # Loaded data fails to overlap needed data by < tolerance on left side, load not needed
+ self.assertFalse(load_needed(trange_loaded,trange_needed,tolerance=tolerance))
+ trange_needed=[tr1[0], tr1[1]+9.0]
+ # Loaded data fails to overlap needed data by < tolerance on right side, load not needed
+ self.assertFalse(load_needed(trange_loaded,trange_needed,tolerance=tolerance))
+ trange_needed=[tr1[0] - 11.0, tr1[1]]
+ # Loaded data fails to overlap needed data by > tolerance on left side, load needed
+ self.assertTrue(load_needed(trange_loaded,trange_needed,tolerance=tolerance))
+ trange_needed=[tr1[0], tr1[1]+11.0]
+ # Loaded data fails to overlap needed data by > tolerance on right side, load needed
+ self.assertTrue(load_needed(trange_loaded,trange_needed,tolerance=tolerance))
+
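+ # For reference, the overlap logic exercised above amounts to the following sketch
+ # (assuming load_needed() simply compares interval endpoints against the tolerance):
+ #
+ #   def load_needed(trange_loaded, trange_needed, tolerance=0.0):
+ #       return (trange_needed[0] < trange_loaded[0] - tolerance or
+ #               trange_needed[1] > trange_loaded[1] + tolerance)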
+
+ def test_autoload_support_from_var(self):
+ """Load FGM."""
+ del_data('thc_*')
+ del_data('slp_*')
+ fit(trange=['2023-01-06','2023-01-07'])
+ autoload_support(varname='thc_fgs_gse',slp=True,spinaxis=True,spinmodel=True)
+ self.assertTrue(data_exists('thc_spinras'))
+ self.assertTrue(data_exists('thc_spindec'))
+ self.assertTrue(data_exists('slp_lun_att_x'))
+ spinmodel=get_spinmodel(probe='c',correction_level=1)
+ self.assertTrue(not (spinmodel is None))
+ trange_needed=get_timespan('thc_fgs_gse')
+ ts1=get_timespan('thc_spinras')
+ ts2=get_timespan('thc_spindec')
+ ts3=get_timespan('slp_lun_att_x')
+ ts4=spinmodel.get_timerange()
+ self.assertFalse(load_needed(ts1,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts2,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts3,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts4,trange_needed,tolerance=120.0))
+
+ def test_autoload_support_without_var(self):
+ """Load FGM."""
+ del_data('thc_*')
+ del_data('slp_*')
+ trange=['2023-01-06','2023-01-07']
+ autoload_support(trange=time_double(trange),probe='c',slp=True,spinaxis=True,spinmodel=True)
+ self.assertTrue(data_exists('thc_spinras'))
+ self.assertTrue(data_exists('thc_spindec'))
+ self.assertTrue(data_exists('slp_lun_att_x'))
+ spinmodel=get_spinmodel(probe='c',correction_level=1)
+ self.assertTrue(not (spinmodel is None))
+ trange_needed=time_double(trange)
+ ts1=get_timespan('thc_spinras')
+ ts2=get_timespan('thc_spindec')
+ ts3=get_timespan('slp_lun_att_x')
+ ts4=spinmodel.get_timerange()
+ self.assertFalse(load_needed(ts1,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts2,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts3,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts4,trange_needed,tolerance=120.0))
+
+ def test_autoload_support_reload_all(self):
+ """Load FGM."""
+ del_data('thc_*')
+ del_data('slp_*')
+ trange=['2008-01-01','2008-01-02']
+ autoload_support(trange=time_double(trange),probe='c',slp=True,spinaxis=True,spinmodel=True)
+ self.assertTrue(data_exists('thc_spinras'))
+ self.assertTrue(data_exists('thc_spindec'))
+ self.assertTrue(data_exists('slp_lun_att_x'))
+ spinmodel=get_spinmodel(probe='c',correction_level=1)
+ self.assertTrue(not (spinmodel is None))
+ trange_needed=time_double(trange)
+ ts1=get_timespan('thc_spinras')
+ ts2=get_timespan('thc_spindec')
+ ts3=get_timespan('slp_lun_att_x')
+ ts4=spinmodel.get_timerange()
+ self.assertFalse(load_needed(ts1,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts2,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts3,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts4,trange_needed,tolerance=120.0))
+ # Now choose a different non-overlapping time range, and ensure everything got reloaded.
+ trange=['2022-01-06','2022-01-07']
+ autoload_support(trange=time_double(trange),probe='c',slp=True,spinaxis=True,spinmodel=True)
+ trange_needed=time_double(trange)
+ ts1=get_timespan('thc_spinras')
+ ts2=get_timespan('thc_spindec')
+ ts3=get_timespan('slp_lun_att_x')
+ spinmodel=get_spinmodel(probe='c',correction_level=1)
+ ts4=spinmodel.get_timerange()
+ self.assertFalse(load_needed(ts1,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts2,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts3,trange_needed,tolerance=120.0))
+ self.assertFalse(load_needed(ts4,trange_needed,tolerance=120.0))
+
+ def test_autoload_support_var_doesntexist(self):
+ # Should warn if a nonexistent tplot variable is passed
+ autoload_support(varname='doesntexist',slp=True,spinaxis=True,spinmodel=True)
+
+ def test_autoload_support_err_no_trange_no_var(self):
+ # Should warn about trange being needed if no tplot variable passed
+ autoload_support(probe='c',slp=True,spinaxis=True,spinmodel=True)
+
+ def test_autoload_support_err_no_trange_no_probe(self):
+ # Should warn about probe being needed if no tplot variable passed and spinaxis or spinmodel
+ trange=['2007-03-23','2007-03-24']
+ autoload_support(trange=trange,probe=None,slp=False,spinaxis=True,spinmodel=True)
+
+ def test_autoload_support_no_probe_no_var_slp_only(self):
+ # Passing a trange only should work if only SLP data is requested
+ del_data('slp_*')
+ trange=['2007-03-23','2007-03-24']
+ autoload_support(trange=trange,slp=True)
+ self.assertTrue(data_exists('slp_lun_att_x'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/themis/tests/tests.py b/pyspedas/themis/tests/tests.py
index 6361883c..ab45edf1 100644
--- a/pyspedas/themis/tests/tests.py
+++ b/pyspedas/themis/tests/tests.py
@@ -1,9 +1,11 @@
"""Test gmag and themis load functions."""
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+import logging
import pyspedas
+import pytplot
+from pytplot import data_exists, get_coords
+
class GmagTestCases(unittest.TestCase):
@@ -35,7 +37,7 @@ def test_check_gmag(self):
def test_load_gmag_data(self):
"""Load gmag."""
- pyspedas.themis.gmag(varnames=['thg_mag_amer'])
+ pyspedas.themis.gmag(varnames=['thg_mag_amer'], sites='amer')
self.assertTrue(data_exists('thg_mag_amer'))
@@ -113,6 +115,30 @@ def test_load_efi_data(self):
pyspedas.themis.efi(time_clip=True, varnames=['thc_eff_e12_efs'])
self.assertTrue(data_exists('thc_eff_e12_efs'))
+ def test_load_slp_data(self):
+ pyspedas.themis.slp()
+ # Check that all data is loaded
+ self.assertTrue(data_exists('slp_sun_ltime'))
+ self.assertTrue(data_exists('slp_lun_ltime'))
+ self.assertTrue(data_exists('slp_sun_pos'))
+ self.assertTrue(data_exists('slp_sun_vel'))
+ self.assertTrue(data_exists('slp_lun_pos'))
+ self.assertTrue(data_exists('slp_lun_vel'))
+ self.assertTrue(data_exists('slp_sun_att_x'))
+ self.assertTrue(data_exists('slp_sun_att_z'))
+ self.assertTrue(data_exists('slp_lun_att_x'))
+ self.assertTrue(data_exists('slp_lun_att_z'))
+ # Check that coordinate systems are set properly
+ self.assertEqual(get_coords('slp_sun_pos').lower(),'gei')
+ self.assertEqual(get_coords('slp_sun_vel').lower(),'gei')
+ self.assertEqual(get_coords('slp_sun_att_x').lower(),'gei')
+ self.assertEqual(get_coords('slp_sun_att_z').lower(),'gei')
+ self.assertEqual(get_coords('slp_lun_pos').lower(),'gei')
+ self.assertEqual(get_coords('slp_lun_vel').lower(),'gei')
+ self.assertEqual(get_coords('slp_lun_att_x').lower(),'gei')
+ self.assertEqual(get_coords('slp_lun_att_z').lower(),'gei')
+
def test_downloadonly(self):
"""Downloadonly keyword."""
files = pyspedas.themis.efi(downloadonly=True,
diff --git a/pyspedas/themis/tests/tests_dsl_cotrans.py b/pyspedas/themis/tests/tests_dsl_cotrans.py
new file mode 100644
index 00000000..19839aeb
--- /dev/null
+++ b/pyspedas/themis/tests/tests_dsl_cotrans.py
@@ -0,0 +1,287 @@
+"""Tests of ssl2dsl and dsl2gse functions."""
+
+import unittest
+from pytplot import get_data,del_data,tplot_restore,data_exists, set_coords
+from numpy.testing import assert_array_almost_equal_nulp, assert_array_max_ulp, assert_allclose
+from pyspedas.themis import autoload_support, ssl2dsl,dsl2gse
+
+
+class DSLCotransDataValidation(unittest.TestCase):
+ """ Compares cotrans results between Python and IDL """
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ IDL Data has to be downloaded to perform these tests
+ The SPEDAS script that creates the file: projects/themis/state/cotrans/thm_cotrans_validate.pro
+ """
+ from pyspedas.utilities.download import download
+ from pyspedas.themis.config import CONFIG
+
+ # Testing time range
+ cls.t = ['2008-03-23', '2008-03-28']
+
+ # Testing tolerance
+ cls.tol = 1e-10
+
+ # Download tplot files
+ remote_server = 'https://spedas.org/'
+ # remote_name = 'testfiles/thm_cotrans_validate.cdf'
+ remote_name = 'testfiles/thm_cotrans_validate.tplot'
+ datafile = download(remote_file=remote_name,
+ remote_path=remote_server,
+ local_path=CONFIG['local_data_dir'],
+ no_download=False)
+ if not datafile:
+ # Skip tests
+ raise unittest.SkipTest("Cannot download data validation file")
+
+ # Load validation variables from the test file
+ del_data('*')
+ filename = datafile[0]
+ # pytplot.cdf_to_tplot(filename)
+ tplot_restore(filename)
+ #pytplot.tplot_names()
+ cls.basis_x = get_data('basis_x')
+ cls.basis_y = get_data('basis_y')
+ cls.basis_z = get_data('basis_z')
+ cls.basis_x_gei2gse = get_data('basis_x_gei2gse')
+ cls.basis_y_gei2gse = get_data('basis_y_gei2gse')
+ cls.basis_z_gei2gse = get_data('basis_z_gei2gse')
+ cls.basis_x_gse2gei = get_data('basis_x_gse2gei')
+ cls.basis_y_gse2gei = get_data('basis_y_gse2gei')
+ cls.basis_z_gse2gei = get_data('basis_z_gse2gei')
+
+ cls.basis_x_dsl2gse = get_data('basis_x_dsl2gse')
+ cls.basis_y_dsl2gse = get_data('basis_y_dsl2gse')
+ cls.basis_z_dsl2gse = get_data('basis_z_dsl2gse')
+
+ cls.basis_x_gse2dsl = get_data('basis_x_gse2dsl')
+ cls.basis_y_gse2dsl = get_data('basis_y_gse2dsl')
+ cls.basis_z_gse2dsl = get_data('basis_z_gse2dsl')
+
+ cls.basis_x_ssl2dsl = get_data('basis_x_ssl2dsl')
+ cls.basis_y_ssl2dsl = get_data('basis_y_ssl2dsl')
+ cls.basis_z_ssl2dsl = get_data('basis_z_ssl2dsl')
+
+ cls.basis_x_dsl2ssl = get_data('basis_x_dsl2ssl')
+ cls.basis_y_dsl2ssl = get_data('basis_y_dsl2ssl')
+ cls.basis_z_dsl2ssl = get_data('basis_z_dsl2ssl')
+
+ # The cotrans routines can now load their own support data. However, the support data time interval
+ # needs substantial padding relative to the target variable. This is likely due to the V03 state
+ # spinaxis and spinphase corrections, which are only given once per day: at least the preceding and
+ # following days are needed to interpolate to the current date, or even two days to account for
+ # non-linear interpolation methods. Perhaps autoload_support should take this into account and add
+ # some extra padding. The basis test vectors have timestamps during 2007-03-23, but the IDL test data
+ # was generated with state loaded from 2007-03-20 through 2007-03-30.
+
+ # original time range from IDL test data generation
+ # state_trange = ['2007-03-20', '2007-03-30']
+
+ # smallest time interval that gives acceptably identical results to IDL
+ state_trange = ['2007-03-21','2007-03-25']
+ autoload_support(trange=state_trange, probe='a', spinaxis=True, spinmodel=True)
+
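+ # Validation strategy: unit basis vectors are transformed through each coordinate
+ # transform in Python and compared against the same vectors transformed by the IDL
+ # SPEDAS implementation (restored from the .tplot validation file).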
+
+ def setUp(self):
+ """ We need to clean tplot variables before each run"""
+ # del_data('*')
+
+ def test_gei2gse(self):
+ """Validate gei2gse transform """
+ from pyspedas.cotrans.cotrans_lib import subgei2gse
+
+ bx = self.basis_x
+ by = self.basis_y
+ bz = self.basis_z
+ times = bx.times
+ bx_gse = subgei2gse(times, bx.y)
+ by_gse = subgei2gse(times, by.y)
+ bz_gse = subgei2gse(times, bz.y)
+ assert_allclose(bx_gse, self.basis_x_gei2gse.y, atol=1.0e-06)
+ assert_allclose(by_gse, self.basis_y_gei2gse.y, atol=1.0e-06)
+ assert_allclose(bz_gse, self.basis_z_gei2gse.y, atol=1.0e-06)
+
+ def test_gse2gei(self):
+ """Validate gse2gei transform """
+ from pyspedas.cotrans.cotrans_lib import subgse2gei
+
+ bx = self.basis_x
+ by = self.basis_y
+ bz = self.basis_z
+ times = bx.times
+ bx_gei = subgse2gei(times, bx.y)
+ by_gei = subgse2gei(times, by.y)
+ bz_gei = subgse2gei(times, bz.y)
+ assert_allclose(bx_gei, self.basis_x_gse2gei.y, atol=1.0e-06)
+ assert_allclose(by_gei, self.basis_y_gse2gei.y, atol=1.0e-06)
+ assert_allclose(bz_gei, self.basis_z_gse2gei.y, atol=1.0e-06)
+
+ def test_dsl2gse_x(self):
+ """Validate dsl2gse X axis transform """
+
+ set_coords('basis_x', 'DSL')
+ result = dsl2gse('basis_x', 'basis_x_dsl2gse', probe='a')
+ self.assertEqual(result,1)
+ bx_gse = get_data('basis_x_dsl2gse')
+ assert_allclose(bx_gse.y, self.basis_x_dsl2gse.y, atol=1.0e-06)
+
+ def test_dsl2gse_y(self):
+ """Validate dsl2gse Y axis transform """
+ set_coords('basis_y', 'DSL')
+ result = dsl2gse('basis_y', 'basis_y_dsl2gse', probe='a')
+ self.assertEqual(result, 1)
+ by_gse = get_data('basis_y_dsl2gse')
+ assert_allclose(by_gse.y, self.basis_y_dsl2gse.y, atol=1.0e-06)
+
+ def test_dsl2gse_z(self):
+ """Validate dsl2gse Z axis transform """
+ set_coords('basis_z', 'DSL')
+ result = dsl2gse('basis_z','basis_z_dsl2gse', probe='a')
+ self.assertEqual(result, 1)
+ bz_gse = get_data('basis_z_dsl2gse')
+ assert_allclose(bz_gse.y, self.basis_z_dsl2gse.y, atol=1.0e-06)
+
+ def test_gse2dsl_x(self):
+ """Validate gse2dsl X axis transform """
+ set_coords('basis_x', 'GSE')
+ result = dsl2gse('basis_x', 'basis_x_gse2dsl', probe='a', isgsetodsl=True)
+ self.assertEqual(result, 1)
+ bx_gse = get_data('basis_x_gse2dsl')
+ assert_allclose(bx_gse.y, self.basis_x_gse2dsl.y, atol=1.0e-06)
+
+ def test_gse2dsl_y(self):
+ """Validate gse2dsl Y axis transform """
+ set_coords('basis_y', 'GSE')
+ result = dsl2gse('basis_y', 'basis_y_gse2dsl', probe='a', isgsetodsl=True)
+ self.assertEqual(result, 1)
+ by_gse = get_data('basis_y_gse2dsl')
+ assert_allclose(by_gse.y, self.basis_y_gse2dsl.y, atol=1.0e-06)
+
+ def test_gse2dsl_z(self):
+ """Validate gse2dsl Z axis transform """
+ set_coords('basis_z', 'GSE')
+ result = dsl2gse('basis_z', 'basis_z_gse2dsl', probe='a', isgsetodsl=True)
+ self.assertEqual(result, 1)
+ bz_gse = get_data('basis_z_gse2dsl')
+ assert_allclose(bz_gse.y, self.basis_z_gse2dsl.y, atol=1.0e-06)
+
+ def test_ssl2dsl_x(self):
+ """Validate ssl2dsl X axis transform """
+ set_coords('basis_x', 'SSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_x', 'basis_x_ssl2dsl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True)
+ self.assertEqual(result, 1)
+ bx_dsl = get_data('basis_x_ssl2dsl')
+ assert_allclose(bx_dsl.y, self.basis_x_ssl2dsl.y, atol=1.0e-06)
+
+ def test_ssl2dsl_y(self):
+ """Validate ssl2dsl Y axis transform """
+ set_coords('basis_y', 'SSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_y', 'basis_y_ssl2dsl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True)
+ self.assertEqual(result, 1)
+ by_dsl = get_data('basis_y_ssl2dsl')
+ assert_allclose(by_dsl.y, self.basis_y_ssl2dsl.y, atol=1.0e-06)
+
+ def test_ssl2dsl_z(self):
+ """Validate ssl2dsl Z axis transform """
+ set_coords('basis_z', 'SSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_z', 'basis_z_ssl2dsl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True)
+ self.assertEqual(result, 1)
+ bz_dsl = get_data('basis_z_ssl2dsl')
+ assert_allclose(bz_dsl.y, self.basis_z_ssl2dsl.y, atol=1.0e-06)
+
+ def test_dsl2ssl_x(self):
+ """Validate dsl2ssl X axis transform """
+ set_coords('basis_x', 'DSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_x','basis_x_dsl2ssl',probe='a',eclipse_correction_level=1, use_spinphase_correction=True, isdsltossl=True)
+ self.assertEqual(result, 1)
+ bx_ssl = get_data('basis_x_dsl2ssl')
+ # This test needs a slightly looser tolerance for some reason.
+ assert_allclose(bx_ssl.y, self.basis_x_dsl2ssl.y, atol=1.5e-06)
+
+ def test_dsl2ssl_y(self):
+ """Validate dsl2ssl Y axis transform """
+ set_coords('basis_y', 'DSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_y','basis_y_dsl2ssl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True, isdsltossl=True)
+ self.assertEqual(result, 1)
+ by_ssl = get_data('basis_y_dsl2ssl')
+ # This test needs a slightly looser tolerance for some reason.
+ assert_allclose(by_ssl.y, self.basis_y_dsl2ssl.y, atol=1.5e-06)
+
+ def test_dsl2ssl_z(self):
+ """Validate dsl2ssl Z axis transform """
+ set_coords('basis_z', 'DSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_z','basis_z_dsl2ssl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True, isdsltossl=True)
+ self.assertEqual(result, 1)
+ bz_ssl = get_data('basis_z_dsl2ssl')
+ assert_allclose(bz_ssl.y, self.basis_z_dsl2ssl.y, atol=1.0e-06)
+
+ def test_catch_mismatch_dsl2ssl_z(self):
+ """Test detection of mismatched input vs. requested coordinate systems in dsl2ssl transform """
+ # Requesting DSL to SSL, but specifying SSL as input coordinate system
+ set_coords('basis_z', 'SSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_z','basis_z_dsl2ssl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True, isdsltossl=True, ignore_input_coord = False)
+ self.assertEqual(result, 0)
+
+ def test_catch_mismatch_ssl2dsl_z(self):
+ """Test detection of mismatched input vs. requested coordinates in ssl2dsl transform """
+ # Requesting SSL to DSL, but specifying DSL as input coordinate system
+ set_coords('basis_z', 'DSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_z','basis_z_ssldsl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True, ignore_input_coord=False)
+ self.assertEqual(result, 0)
+
+ def test_catch_mismatch_gse2dsl_z(self):
+ """Test detection of mismatched input vs requested coordinates in gse2dsl transform """
+ set_coords('basis_z', 'DSL')
+ result = dsl2gse('basis_z', 'basis_z_gse2dsl', probe='a', isgsetodsl=True)
+ self.assertEqual(result, 0)
+
+ def test_catch_mismatch_dsl2gse_z(self):
+ """Test detection of mismatched input vs. requested coordinates in dsl2gse transform """
+ set_coords('basis_z', 'GSE')
+ result = dsl2gse('basis_z', 'basis_z_gse2dsl', probe='a')
+ self.assertEqual(result, 0)
+
+ def test_ignore_mismatch_dsl2ssl_z(self):
+ """Test ability to bypass coordinate system consistency check in dsl2ssl transform """
+ # Requesting DSL to SSL, but specifying SSL as input coordinate system
+ set_coords('basis_z', 'SSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_z','basis_z_dsl2ssl', probe='a', eclipse_correction_level=1, use_spinphase_correction=True, isdsltossl=True, ignore_input_coord = True)
+ self.assertEqual(result, 1)
+
+ def test_ignore_mismatch_ssl2dsl_z(self):
+ """Test ability to bypass coordinate system consistency check in ssl2dsl transform """
+ # Requesting SSL to DSL, but specifying DSL as input coordinate system
+ set_coords('basis_z', 'DSL')
+ # Usually probe can be inferred from the input variable name, but we need it here.
+ result = ssl2dsl('basis_z','basis_z_ssldsl', probe='a', eclipse_correction_level=1,use_spinphase_correction=True, ignore_input_coord=True)
+ self.assertEqual(result, 1)
+
+ def test_ignore_mismatch_gse2dsl_z(self):
+ """Test ability to bypass coordinate system consistency check in gse2dsl transform """
+ set_coords('basis_z', 'DSL')
+ result = dsl2gse('basis_z', 'basis_z_gse2dsl', probe='a', isgsetodsl=True, ignore_input_coord=True)
+ self.assertEqual(result, 1)
+
+ def test_ignore_mismatch_dsl2gse_z(self):
+ """Test ability to bypass coordinate system consistency check in dsl2gse transform """
+ set_coords('basis_z', 'GSE')
+ result = dsl2gse('basis_z', 'basis_z_gse2dsl', probe='a', ignore_input_coord=True)
+ self.assertEqual(result, 1)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/themis/tests/tests_lunar_cotrans.py b/pyspedas/themis/tests/tests_lunar_cotrans.py
new file mode 100644
index 00000000..1b81024c
--- /dev/null
+++ b/pyspedas/themis/tests/tests_lunar_cotrans.py
@@ -0,0 +1,279 @@
+"""Tests of gse2sse and sse2sel functions."""
+import unittest
+from numpy.testing import assert_array_almost_equal_nulp, assert_array_max_ulp, assert_allclose
+from copy import deepcopy
+from pytplot import data_exists, get_data, store_data, cdf_to_tplot, del_data, tplot_restore, replace_metadata
+from pytplot import get_coords,set_coords
+from pyspedas.themis import gse2sse,sse2sel
+
+
+
+class LunCotransDataValidation(unittest.TestCase):
+ """ Compares cotrans results between Python and IDL """
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ IDL Data has to be downloaded to perform these tests
+ The SPEDAS script that creates the file: projects/themis/state/cotrans/thm_cotrans_validate.pro
+ """
+ from pyspedas.utilities.download import download
+ from pyspedas.themis.config import CONFIG
+
+ # Testing time range
+ cls.t = ['2008-03-23', '2008-03-28']
+
+ # Testing tolerance
+ cls.tol = 1e-10
+
+ # Download tplot files
+ remote_server = 'https://spedas.org/'
+ #remote_name = 'testfiles/thm_cotrans_validate.cdf'
+ remote_name = 'testfiles/thm_cotrans_validate.tplot'
+ datafile = download(remote_file=remote_name,
+ remote_path=remote_server,
+ local_path=CONFIG['local_data_dir'],
+ no_download=False)
+ if not datafile:
+ # Skip tests
+ raise unittest.SkipTest("Cannot download data validation file")
+
+ # Load validation variables from the test file
+ del_data('*')
+ filename = datafile[0]
+ #cdf_to_tplot(filename)
+ tplot_restore(filename)
+ # pytplot.tplot_names()
+ # Input variables
+ #coord_set_coord('tha_state_pos_gse','gse')
+ #coord_set_coord('tha_state_vel_gse','gse')
+ cls.tha_state_pos_gse = get_data('tha_state_pos_gse')
+ cls.tha_state_vel_gse = get_data('tha_state_vel_gse')
+ #coord_set_coord('tha_fgs_gse','gse')
+ cls.tha_fgs_gse = get_data('tha_fgs_gse')
+ # GSE<->SSE results
+ #coord_set_coord('tha_state_pos_sse','sse')
+ #coord_set_coord('tha_state_vel_sse','sse')
+ cls.tha_state_pos_sse = get_data('tha_state_pos_sse')
+ cls.tha_state_vel_sse = get_data('tha_state_vel_sse')
+ #coord_set_coord('tha_state_pos_sse_rotate_only','sse')
+ #coord_set_coord('tha_state_vel_sse_rotate_only','sse')
+ cls.tha_state_pos_sse_rotate_only = get_data('tha_state_pos_sse_rotate_only')
+ cls.tha_state_vel_sse_rotate_only = get_data('tha_state_vel_sse_rotate_only')
+ #coord_set_coord('tha_fgs_sse','sse')
+ cls.tha_fgs_sse = get_data('tha_fgs_sse')
+ #coord_set_coord('tha_fgs_sel','sel')
+ cls.tha_fgs_sel = get_data('tha_fgs_sel')
+
+ #coord_set_coord('tha_state_pos_gse_sse_gse','gse')
+ #coord_set_coord('tha_state_vel_gse_sse_gse','gse')
+ cls.tha_state_pos_gse_sse_gse = get_data('tha_state_pos_gse_sse_gse')
+ cls.tha_state_vel_gse_sse_gse = get_data('tha_state_vel_gse_sse_gse')
+
+ #coord_set_coord('tha_state_pos_gse_sse_gse_rotate_only','gse')
+ #coord_set_coord('tha_state_vel_gse_sse_gse_rotate_only','gse')
+ cls.tha_state_pos_gse_sse_gse_rotate_only = get_data('tha_state_pos_gse_sse_gse_rotate_only')
+ cls.tha_state_vel_gse_sse_gse_rotate_only = get_data('tha_state_vel_gse_sse_gse_rotate_only')
+
+ #coord_set_coord('tha_fgs_gse_sse_gse','gse')
+ cls.tha_fgs_gse_sse_gse = get_data('tha_fgs_gse_sse_gse')
+
+ # SSE<->SEL results
+ #coord_set_coord('tha_state_pos_sel','sel')
+ cls.tha_state_pos_sel = get_data('tha_state_pos_sel')
+
+ #coord_set_coord('tha_state_pos_gse_sel_sse','sse')
+ #coord_set_coord('tha_state_vel_gse_sel_sse','sse')
+ cls.tha_state_pos_gse_sel_sse = get_data('tha_state_pos_gse_sel_sse')
+ cls.sse_mat_cotrans = get_data('sse_mat_cotrans')
+ cls.sel_mat_cotrans = get_data('sel_mat_cotrans')
+ cls.sel_x_gei = get_data('sel_x_gei')
+ cls.sel_x_gse = get_data('sel_x_gse')
+ cls.sel_x_sse = get_data('sel_x_sse')
+ cls.sel_y_sse = get_data('sel_y_sse')
+ cls.sel_z_sse = get_data('sel_z_sse')
+
+ # It is no longer necessary to load or pass support data when calling gse2sse and sse2sel
+ # autoload_support(varname='tha_state_pos_gse', slp=True)
+
+ def setUp(self):
+ """ We need to clean tplot variables before each run"""
+ # del_data('*')
+
+ def test_replace_metadata(self):
+ data = get_data('tha_state_pos_gse')
+ orig_meta = deepcopy(get_data('tha_state_pos_gse',metadata=True))
+ orig_coord = get_coords('tha_state_pos_gse')
+ self.assertEqual(orig_coord.lower(), 'gse')
+ store_data('newvar',data={'x':data[0],'y':data[1]})
+ replace_metadata('newvar',orig_meta)
+ self.assertEqual(get_coords('newvar').lower(),'gse')
+ orig_meta['data_att']['coord_sys'] = 'goofy' # won't affect tha_state_pos_gse, should not affect newvar either
+ self.assertEqual(get_coords('newvar').lower(),'gse')
+ self.assertEqual(get_coords('tha_state_pos_gse').lower(),'gse')
+
+ def test_gse2sse_pos(self):
+ """ Validate gse2sse position transform """
+ result = gse2sse('tha_state_pos_gse', 'tha_state_pos_sse', variable_type='pos')
+ self.assertEqual(result,1)
+ py_sse_mat_cotrans = get_data('sse_mat_cotrans')
+ assert_allclose(py_sse_mat_cotrans.y, self.sse_mat_cotrans.y, atol=1.0e-06)
+ pos_sse = get_data('tha_state_pos_sse')
+ pos_meta = get_data('tha_state_pos_sse',metadata=True)
+ self.assertEqual(pos_meta['data_att']['units'],'km')
+ assert_allclose(pos_sse.y, self.tha_state_pos_sse.y, atol=0.1)
+ self.assertEqual(get_coords('tha_state_pos_sse').lower(),'sse')
+
+ def test_gse2sse_pos_rotate_only(self):
+ """ Validate gse2sse position transform """
+ result = gse2sse('tha_state_pos_gse', 'tha_state_pos_sse_rotate_only', variable_type='pos',rotation_only=True)
+ self.assertEqual(result,1)
+ pos_sse = get_data('tha_state_pos_sse_rotate_only')
+ pos_meta = get_data('tha_state_pos_sse_rotate_only',metadata=True)
+ self.assertEqual(pos_meta['data_att']['units'],'km')
+ assert_allclose(pos_sse.y, self.tha_state_pos_sse_rotate_only.y, atol=0.1)
+ self.assertEqual(get_coords('tha_state_pos_sse_rotate_only').lower(),'sse')
+
+ def test_gse2sse_vel(self):
+ """ Validate gse2sse velocity transform """
+ result = gse2sse('tha_state_vel_gse', 'tha_state_vel_sse',variable_type='vel')
+ self.assertEqual(result,1)
+ vel_sse = get_data('tha_state_vel_sse')
+ vel_meta = get_data('tha_state_vel_sse',metadata=True)
+ self.assertEqual(vel_meta['data_att']['units'],'km/s')
+ assert_allclose(vel_sse.y, self.tha_state_vel_sse.y, atol=1.0e-03)
+ self.assertEqual(get_coords('tha_state_vel_sse').lower(),'sse')
+
+ def test_gse2sse_vel_rotate_only(self):
+ """ Validate gse2sse position transform """
+ result = gse2sse('tha_state_vel_gse', 'tha_state_vel_sse_rotate_only', variable_type='vel',rotation_only=True)
+ self.assertEqual(result,1)
+ vel_sse = get_data('tha_state_vel_sse_rotate_only')
+ vel_meta = get_data('tha_state_vel_sse_rotate_only',metadata=True)
+ self.assertEqual(vel_meta['data_att']['units'],'km/s')
+ assert_allclose(vel_sse.y, self.tha_state_vel_sse_rotate_only.y, atol=1.0e-03)
+ self.assertEqual(get_coords('tha_state_vel_sse_rotate_only').lower(),'sse')
+
+ def test_gse2sse_field(self):
+ """ Validate gse2sse field transform """
+ result = gse2sse('tha_fgs_gse', 'tha_fgs_sse')
+ self.assertEqual(result, 1)
+ fgs_sse = get_data('tha_fgs_sse')
+ fgs_meta = get_data('tha_fgs_sse',metadata=True)
+ self.assertEqual(fgs_meta['data_att']['units'],'nT')
+ assert_allclose(fgs_sse.y, self.tha_fgs_sse.y, atol=1.0e-02)
+ self.assertEqual(get_coords('tha_fgs_sse').lower(), 'sse')
+
+ def test_sse2gse_pos(self):
+ """ Validate sse2gse position transform """
+ store_data('tha_state_pos_sse',data={'x':self.tha_state_pos_sse.times, 'y':self.tha_state_pos_sse.y})
+ set_coords('tha_state_pos_sse','sse')
+ before_meta = get_data('tha_state_pos_sse',metadata=True)
+ before_meta['data_att']['units'] = 'km'
+ result = gse2sse('tha_state_pos_sse', 'tha_state_pos_gse_sse_gse',isssetogse=True,
+ variable_type='pos')
+ self.assertEqual(result,1)
+ pos_gse = get_data('tha_state_pos_gse_sse_gse')
+ pos_meta = get_data('tha_state_pos_gse_sse_gse',metadata=True)
+ self.assertEqual(pos_meta['data_att']['units'],'km')
+ assert_allclose(pos_gse.y, self.tha_state_pos_gse_sse_gse.y, atol=0.1)
+ self.assertEqual(get_coords('tha_state_pos_gse_sse_gse').lower(),'gse')
+
+ def test_sse2gse_pos_rotate_only(self):
+ """ Validate sse2gse position transform """
+ store_data('tha_state_pos_sse_rotate_only',
+ data={'x':self.tha_state_pos_sse_rotate_only.times, 'y':self.tha_state_pos_sse_rotate_only.y})
+ set_coords('tha_state_pos_sse_rotate_only','sse')
+ result = gse2sse('tha_state_pos_sse_rotate_only', 'tha_state_pos_gse_sse_gse_rotation_only',isssetogse=True,
+ variable_type='pos', rotation_only=True)
+ self.assertEqual(result,1)
+ pos_gse = get_data('tha_state_pos_gse_sse_gse_rotation_only')
+ assert_allclose(pos_gse.y, self.tha_state_pos_gse_sse_gse_rotate_only.y, atol=0.1)
+ self.assertEqual(get_coords('tha_state_pos_gse_sse_gse_rotation_only').lower(),'gse')
+
+ def test_sse2gse_vel(self):
+ """ Validate sse2gse velocity transform """
+ result = gse2sse('tha_state_vel_sse', 'tha_state_vel_gse_sse_gse',isssetogse=True,
+ variable_type='vel')
+ self.assertEqual(result,1)
+ vel_gse = get_data('tha_state_vel_gse_sse_gse')
+ assert_allclose(vel_gse.y, self.tha_state_vel_gse_sse_gse.y, atol=1.0e-02)
+ self.assertEqual(get_coords('tha_state_vel_gse_sse_gse').lower(),'gse')
+
+ def test_sse2gse_vel_rotate_only(self):
+ """ Validate sse2gse position transform """
+ store_data('tha_state_vel_sse_rotate_only',
+ data={'x':self.tha_state_vel_sse_rotate_only.times, 'y':self.tha_state_vel_sse_rotate_only.y})
+ set_coords('tha_state_vel_sse_rotate_only','sse')
+ result = gse2sse('tha_state_vel_sse_rotate_only', 'tha_state_vel_gse_sse_gse_rotation_only',isssetogse=True,
+ variable_type='vel', rotation_only=True)
+ self.assertEqual(result,1)
+ vel_gse = get_data('tha_state_vel_gse_sse_gse_rotation_only')
+ assert_allclose(vel_gse.y, self.tha_state_vel_gse_sse_gse_rotate_only.y, atol=1.0e-03)
+ self.assertEqual(get_coords('tha_state_vel_gse_sse_gse_rotation_only').lower(),'gse')
+
+ def test_sse2gse_field(self):
+ """ Validate gse2sse field transform """
+ result = gse2sse('tha_fgs_sse','tha_fgs_gse_sse_gse',isssetogse=True)
+ self.assertEqual(result, 1)
+ fgs_gse = get_data('tha_fgs_gse_sse_gse')
+ assert_allclose(fgs_gse.y, self.tha_fgs_gse_sse_gse.y, atol=1.0e-02)
+ self.assertEqual(get_coords('tha_fgs_gse_sse_gse').lower(), 'gse')
+
+ def test_sse2sel_pos(self):
+ """ Validate sse2sel position transform """
+ result = sse2sel('tha_state_pos_sse','tha_state_pos_sel')
+ self.assertEqual(result,1)
+ py_sel_x_gse = get_data('slp_lun_att_x_gse')
+ assert_allclose(self.sel_x_gse.y,py_sel_x_gse.y,atol=1.0e-06)
+ py_sel_x_sse = get_data('sel_x_sse')
+ assert_allclose(self.sel_x_sse.y,py_sel_x_sse.y,atol=1.0e-06)
+ py_sel_y_sse = get_data('sel_y_sse')
+ assert_allclose(self.sel_y_sse.y,py_sel_y_sse.y,atol=1.0e-06)
+ py_sel_z_sse = get_data('sel_z_sse')
+ assert_allclose(self.sel_z_sse.y,py_sel_z_sse.y,atol=1.0e-06)
+ py_sel_mat_cotrans = get_data('sel_mat_cotrans')
+ assert_allclose(py_sel_mat_cotrans.y, self.sel_mat_cotrans.y, atol=1.0e-06)
+ pos_sel = get_data('tha_state_pos_sel')
+ pos_meta = get_data('tha_state_pos_sel',metadata=True)
+ self.assertEqual(pos_meta['data_att']['units'],'km')
+ assert_allclose(pos_sel.y, self.tha_state_pos_sel.y, atol=0.1)
+ self.assertEqual(get_coords('tha_state_pos_sel').lower(),'sel')
+
+ def test_sse2sel_fgs(self):
+ """ Validate sse2sel field transform """
+ result = sse2sel('tha_fgs_sse', 'tha_fgs_sel')
+ self.assertEqual(result,1)
+ fgs_sel = get_data('tha_fgs_sel')
+ assert_allclose(fgs_sel.y, self.tha_fgs_sel.y, atol=.005)
+ self.assertEqual(get_coords('tha_fgs_sel').lower(),'sel')
+
+ def test_sel2sse_pos(self):
+ """ Validate sel2sse position transform """
+ # Restore original baseline input tplot variable
+ store_data('tha_state_pos_sel',data={'x':self.tha_state_pos_sel.times, 'y':self.tha_state_pos_sel.y})
+ set_coords('tha_state_pos_sel','sel')
+
+ result = sse2sel('tha_state_pos_sel', 'tha_state_pos_gse_sel_sse', isseltosse=True)
+ self.assertEqual(result,1)
+ pos_sse = get_data('tha_state_pos_gse_sel_sse')
+ assert_allclose(pos_sse.y, self.tha_state_pos_gse_sel_sse.y, atol=0.1)
+ self.assertEqual(get_coords('tha_state_pos_gse_sel_sse').lower(),'sse')
+
+ def test_sel2sse_field(self):
+ """ Validate sel2sse field transform """
+ # Restore original baseline input tplot variable
+ store_data('tha_fgs_sel',data={'x':self.tha_fgs_sel.times, 'y':self.tha_fgs_sel.y})
+ set_coords('tha_fgs_sel','sel')
+ md_before = get_data('tha_fgs_sel',metadata=True)
+ md_before['data_att']['units'] = 'nT'
+ result = sse2sel('tha_fgs_sel', 'tha_fgs_sel_sse', isseltosse=True)
+ self.assertEqual(result,1)
+ fgs_sse = get_data('tha_fgs_sel_sse')
+ fgs_meta = get_data('tha_fgs_sel_sse',metadata=True)
+ self.assertEqual(fgs_meta['data_att']['units'],'nT')
+ assert_allclose(fgs_sse.y, self.tha_fgs_sse.y, atol=0.1)
+ self.assertEqual(get_coords('tha_fgs_sel_sse').lower(),'sse')
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/themis/tests/tests_spinmodel.py b/pyspedas/themis/tests/tests_spinmodel.py
new file mode 100644
index 00000000..91723168
--- /dev/null
+++ b/pyspedas/themis/tests/tests_spinmodel.py
@@ -0,0 +1,178 @@
+""" Tests of spinmodel construction and interpolation """
+import unittest
+from numpy.testing import assert_array_almost_equal_nulp, assert_array_max_ulp, assert_allclose
+from pytplot import get_data, store_data, time_string, del_data, cdf_to_tplot
+from pyspedas.themis import state, get_spinmodel
+
+
+
+class SpinmodelDataValidation(unittest.TestCase):
+ """
+ Compare spin models and interpolation results generated by IDL and Python.
+
+ The corresponding IDL script takes a time range, probe, and eclipse correction level,
+ then creates a tplot variable containing the test parameters. IDL builds a spin model by calling
+ thm_load_state for the given probe and time interval, and dumps all the segment parameters to tplot
+ variables. Then a spin model interpolation routine is called for a set of timestamps, and all the
+ interpolated parameters are dumped to tplot variables. The tplot outputs are saved in a cdf file or a
+ tplot save file.
+
+ On the python side, the validation file is read to get the test parameters. The spin models are
+ created with a call to themis.state(), then the test timestamps are passed to the spinmodel interpolation
+ routine. The Python and IDL tplot variables are each given their own prefix.
+
+ After loading the necessary data, the various tests in this file compare the Python and IDL values
+ for each of the spinmodel segment parameters, and interpolation results.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ IDL Data has to be downloaded to perform these tests
+ The SPEDAS script that creates data file: projects/themis/spin/spinmodel_python_test.pro
+ """
+ from pyspedas.utilities.download import download
+ from pyspedas.themis.config import CONFIG
+
+ # Download tplot files
+ remote_server = 'https://spedas.org/'
+ # remote_name = 'testfiles/thm_cotrans_validate.cdf'
+ remote_name = 'testfiles/tha_validate_spinmodel.cdf'
+ datafile = download(remote_file=remote_name,
+ remote_path=remote_server,
+ local_path=CONFIG['local_data_dir'],
+ no_download=False)
+ if not datafile:
+ # Skip tests
+ raise unittest.SkipTest("Cannot download data validation file")
+
+ # Load validation variables from the test file
+ del_data('*')
+ filename = datafile[0]
+ cdf_to_tplot(filename)
+ # pytplot.tplot_restore(filename)
+ t_dummy, trange = get_data('parm_trange')
+ t_dummy, probe_idx = get_data('parm_probe')
+ t_dummy, correction_level = get_data('parm_correction_level')
+ #print(trange)
+ #print(time_string(trange))
+ #print(probe_idx)
+ int_idx = int(probe_idx[0])
+ probes = ['a', 'b', 'c', 'd', 'e']
+ int_corr_level = int(correction_level[0])
+ probe = probes[int_idx]
+ #print(probe)
+ #print(int_corr_level)
+ thm_data = state(trange=trange, probe=probe, get_support_data=True)
+ cls.model = get_spinmodel(probe, int_corr_level)
+ cls.model.make_tplot_vars('py_seg_')
+ #pytplot.tplot_names()
+ dummy_t, tst_times = get_data('interp_times')
+ res = cls.model.interp_t(tst_times)
+ store_data('py_spinphase', data={'x': tst_times, 'y': res.spinphase})
+ store_data('py_spinper', data={'x': tst_times, 'y': res.spinper})
+ store_data('py_spincount', data={'x': tst_times, 'y': res.spincount})
+ store_data('py_t_last', data={'x': tst_times, 'y': res.t_last})
+ store_data('py_eclipse_delta_phi', data={'x': tst_times, 'y': res.eclipse_delta_phi})
+ store_data('py_segflags', data={'x': tst_times, 'y': res.segflags})
+
+ def setUp(self):
+ """ We need to clean tplot variables before each run"""
+ # pytplot.del_data('*')
+
+ def test_seg_t1(self):
+ pydata = get_data('py_seg_t1')
+ idldata = get_data('seg_t1')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_t2(self):
+ pydata = get_data('py_seg_t2')
+ idldata = get_data('seg_t2')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_c1(self):
+ pydata = get_data('py_seg_c1')
+ idldata = get_data('seg_c1')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_c2(self):
+ pydata = get_data('py_seg_c2')
+ idldata = get_data('seg_c2')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_b(self):
+ pydata = get_data('py_seg_b')
+ idldata = get_data('seg_b')
+ assert_allclose(pydata.y, idldata.y, rtol=1.0e-05)
+
+ def test_seg_c(self):
+ pydata = get_data('py_seg_c')
+ idldata = get_data('seg_c')
+ assert_allclose(pydata.y, idldata.y, rtol=1.0e-06)
+
+ def test_seg_npts(self):
+ pydata = get_data('py_seg_npts')
+ idldata = get_data('seg_npts')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_maxgap(self):
+ pydata = get_data('py_seg_maxgap')
+ idldata = get_data('seg_maxgap')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_phaserr(self):
+ pydata = get_data('py_seg_phaserr')
+ idldata = get_data('seg_phaserr')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_idpu_spinper(self):
+ pydata = get_data('py_seg_idpu_spinper')
+ idldata = get_data('seg_idpu_spinper')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_initial_delta_phi(self):
+ pydata = get_data('py_seg_initial_delta_phi')
+ idldata = get_data('seg_initial_delta_phi')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_seg_segflags(self):
+ pydata = get_data('py_seg_segflags')
+ idldata = get_data('seg_segflags')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_interp_spinphase(self):
+ pydata = get_data('py_spinphase')
+ idldata = get_data('interp_spinphase')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-04)
+
+ def test_interp_spinper(self):
+ pydata = get_data('py_spinper')
+ idldata = get_data('interp_spinper')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_interp_spincount(self):
+ pydata = get_data('py_spincount')
+ idldata = get_data('interp_spincount')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_interp_segflags(self):
+ pydata = get_data('py_segflags')
+ idldata = get_data('interp_segflags')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_interp_eclipse_delta_phi(self):
+ pydata = get_data('py_eclipse_delta_phi')
+ idldata = get_data('interp_eclipse_delta_phi')
+ assert_allclose(pydata.y, idldata.y, atol=1.0e-06)
+
+ def test_timerange(self):
+ trange=self.model.get_timerange()
+ print(time_string(trange[0]),time_string(trange[1]))
+
+ def test_eclipse_times(self):
+ start_times,end_times=self.model.get_eclipse_times()
+ for i in range(len(start_times)):
+ print(time_string(start_times[i]),time_string(end_times[i]))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/themis/tests/tests_state.py b/pyspedas/themis/tests/tests_state.py
new file mode 100644
index 00000000..50592680
--- /dev/null
+++ b/pyspedas/themis/tests/tests_state.py
@@ -0,0 +1,98 @@
+import logging
+import unittest
+from pyspedas.themis import state
+from pytplot import data_exists, get_data, del_data, tplot_restore
+from numpy.testing import assert_allclose
+
+class StateDataValidation(unittest.TestCase):
+ """ Tests creation of support variables in themis.state() """
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ IDL Data has to be downloaded to perform these tests
+ The IDL script that creates data file: projects/themis/state/thm_state_validate.pro
+ """
+ from pyspedas.utilities.download import download
+ from pyspedas.themis.config import CONFIG
+
+ # Testing time range
+ cls.t = ['2008-03-23', '2008-03-28']
+
+
+ # Download validation file
+ remote_server = 'https://spedas.org/'
+ remote_name = 'testfiles/thm_state_validate.tplot'
+ datafile = download(remote_file=remote_name,
+ remote_path=remote_server,
+ local_path=CONFIG['local_data_dir'],
+ no_download=False)
+ if not datafile:
+ # Skip tests
+ raise unittest.SkipTest("Cannot download data validation file")
+
+ # Load validation variables from the test file
+ del_data('*')
+ filename = datafile[0]
+ tplot_restore(filename)
+ #pytplot.tplot_names()
+ cls.tha_pos = get_data('tha_state_pos')
+ cls.tha_vel = get_data('tha_state_vel')
+ cls.tha_spinras = get_data('tha_state_spinras')
+ cls.tha_spindec = get_data('tha_state_spindec')
+ cls.tha_spinras_correction = get_data('tha_state_spinras_correction')
+ cls.tha_spindec_correction = get_data('tha_state_spindec_correction')
+ cls.tha_spinras_corrected = get_data('tha_state_spinras_corrected')
+ cls.tha_spindec_corrected = get_data('tha_state_spindec_corrected')
+
+ # Load with pyspedas
+ state(probe='a',trange=cls.t,get_support_data=True)
+
+ def setUp(self):
+ """ We need to clean tplot variables before each run"""
+ # del_data('*')
+
+ def test_state_spinras(self):
+ """Validate state variables """
+ my_data = get_data('tha_state_spinras')
+ assert_allclose(my_data.y,self.tha_spinras.y,rtol=1.0e-06)
+
+ def test_state_spindec(self):
+ """Validate state variables """
+ my_data = get_data('tha_state_spindec')
+ assert_allclose(my_data.y,self.tha_spindec.y,rtol=1.0e-06)
+
+ def test_state_spinras_correction(self):
+ """Validate state variables """
+ my_data = get_data('tha_state_spinras_correction')
+ assert_allclose(my_data.y,self.tha_spinras_correction.y,rtol=1.0e-06)
+
+ def test_state_spindec_correction(self):
+ """Validate state variables """
+ my_data = get_data('tha_state_spindec_correction')
+ assert_allclose(my_data.y,self.tha_spindec_correction.y,rtol=1.0e-06)
+
+ def test_state_spinras_corrected(self):
+ """Validate state variables """
+ my_data = get_data('tha_state_spinras_corrected')
+ assert_allclose(my_data.y,self.tha_spinras_corrected.y,rtol=1.0e-06)
+
+ def test_state_spindec_corrected(self):
+ """Validate state variables """
+ my_data = get_data('tha_state_spindec_corrected')
+ assert_allclose(my_data.y,self.tha_spindec_corrected.y,rtol=1.0e-06)
+
+ def test_state_reload_no_v03(self):
+ # Test overwriting of spin axis correction variables if data is loaded with V03 corrections, then other
+ # data loaded that doesn't have the corrections (prevents dangling correction variables)
+ ts1 = ['2007-03-23','2007-03-24']
+ ts2 = ['2023-01-01','2023-01-02']
+ state(trange=ts1,probe='a',get_support_data=True) # V03 corrections exist
+ self.assertTrue(data_exists('tha_spinras_correction'))
+ self.assertTrue(data_exists('tha_spindec_correction'))
+ state(trange=ts2,probe='a',get_support_data=True) # V03 corrections do not exist
+ self.assertFalse(data_exists('tha_spinras_correction'))
+ self.assertFalse(data_exists('tha_spindec_correction'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/themis/tests/tests_tplot_time.py b/pyspedas/themis/tests/tests_tplot_time.py
new file mode 100644
index 00000000..f367871c
--- /dev/null
+++ b/pyspedas/themis/tests/tests_tplot_time.py
@@ -0,0 +1,117 @@
+import logging
+import unittest
+import pyspedas
+import pytplot
+from numpy.testing import assert_allclose
+
+class TplotTimeValidation(unittest.TestCase):
+ """ Tests creation of support variables in themis.state() """
+
+ @classmethod
+ def setUpClass(cls):
+ pass
+
+ def setUp(self):
+ """ We need to clean tplot variables before each run"""
+ pass
+
+
+ def test_timespan(self):
+ """Test pytplot.timespan as used in ERG notebook"""
+ from pytplot import tplot,timespan
+ from pyspedas.erg import mgf
+ vars = mgf(trange=['2017-03-27', '2017-03-28']) # load MGF Lv.2 8-s data for 0-24 UT on Mar. 27, 2017.
+ tplot('erg_mgf_l2_mag_8sec_sm')
+ timespan('2017-03-27 09:00:00', 6, keyword='hours')
+ tplot(['erg_mgf_l2_mag_8sec_sm', 'erg_mgf_l2_mag_8sec_gsm'])
+
+ def test_subsec_timespan(self):
+ """Test pytplot.timespan as used in ERG notebook"""
+ from pytplot import tplot,timespan
+ from pyspedas.themis import fgm
+ vars = fgm(probe='a',level='l2',trange=['2007-03-23', '2007-03-24'])
+ tplot('tha_fgl_dsl') # full plot
+ timespan('2007-03-23/14:00',0.9,"seconds")
+ tplot('tha_fgl_dsl') # short time interval
+
+ def test_subsec_tlimit(self):
+ """Test pytplot.tlimit with a sub-second interval on THEMIS FGM data"""
+ from pytplot import tplot,tlimit
+ from pyspedas.themis import fgm
+ vars = fgm(probe='a',level='l2',trange=['2007-03-23', '2007-03-24'])
+ tplot('tha_fgl_dsl') # full plot
+ tlimit(['2007-03-23/14:00','2007-03-23/14:00:00.9'])
+ tplot('tha_fgl_dsl') # short time interval
+
+ def test_timebar(self):
+ """Test pytplot.timebar with several accepted time string formats"""
+ from pytplot import tplot,tlimit, timebar
+ from pyspedas.themis import fgm
+ vars = fgm(probe='a',level='l2',trange=['2007-03-23', '2007-03-24'])
+ tplot('tha_fgl_dsl') # full plot
+ # Test various timebar formats
+ # Standard format
+ timebar('2007-03-23 14:00:00')
+ tplot('tha_fgl_dsl')
+ # Slash between date and time
+ timebar('2007-03-23/13:00:00')
+ tplot('tha_fgl_dsl')
+ # ISO8601
+ timebar('20070323T1330')
+ tplot('tha_fgl_dsl')
+ tlimit(['2007-03-23/14:00', '2007-03-23/14:00:00.9'])
+ # Subsecond precision
+ timebar('2007-03-23/14:00:00.5')
+ tplot('tha_fgl_dsl')
+
+ def test_tlimit_full_arg(self):
+ """Test pytplot.tlimit('full') restoring the full time interval"""
+ from pytplot import tplot,tlimit
+ from pyspedas.themis import fgm
+ vars = fgm(probe='a',level='l2',trange=['2007-03-23', '2007-03-24'])
+ tplot('tha_fgl_dsl') # full plot
+ tlimit(['2007-03-23/14:00','2007-03-23/14:00:00.9'])
+ tplot('tha_fgl_dsl') # short time interval
+ tlimit('full')
+ tplot('tha_fgl_dsl') # back to full interval
+
+ def test_tlimit_full_flag(self):
+ """Test pytplot.tlimit(full=True) restoring the full time interval"""
+ from pytplot import tplot,tlimit
+ from pyspedas.themis import fgm
+ vars = fgm(probe='a',level='l2',trange=['2007-03-23', '2007-03-24'])
+ tplot('tha_fgl_dsl') # full plot
+ tlimit(['2007-03-23/14:00','2007-03-23/14:00:00.9'])
+ tplot('tha_fgl_dsl') # short time interval
+ tlimit(full=True)
+ tplot('tha_fgl_dsl') # back to full interval
+
+ def test_tlimit_last_arg(self):
+ """Test pytplot.tlimit('last') restoring the previous time interval"""
+ from pytplot import tplot,tlimit
+ from pyspedas.themis import fgm
+ vars = fgm(probe='a',level='l2',trange=['2007-03-23', '2007-03-24'])
+ tplot('tha_fgl_dsl') # full plot
+ tlimit(['2007-03-23/14:00','2007-03-23/15:00:00'])
+ tplot('tha_fgl_dsl') # time interval 1 14:00 to 15:00
+ tlimit(['2007-03-23/12:00','2007-03-23/13:00:00'])
+ tplot('tha_fgl_dsl') # time interval 2 12:00 to 13:00
+ tlimit('last')
+ tplot('tha_fgl_dsl') # back to time interval 1, 14:00 to 15:00
+
+ def test_tlimit_last_flag(self):
+ """Test pytplot.tlimit(last=True) restoring the previous time interval"""
+ from pytplot import tplot,tlimit
+ from pyspedas.themis import fgm
+ vars = fgm(probe='a',level='l2',trange=['2007-03-23', '2007-03-24'])
+ tplot('tha_fgl_dsl') # full plot
+ tlimit(['2007-03-23/14:00','2007-03-23/15:00:00'])
+ tplot('tha_fgl_dsl') # time interval 1 14:00 to 15:00
+ tlimit(['2007-03-23/12:00','2007-03-23/13:00:00'])
+ tplot('tha_fgl_dsl') # time interval 2 12:00 to 13:00
+ tlimit(last=True)
+ tplot('tha_fgl_dsl') # back to time interval 1, 14:00 to 15:00
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pyspedas/themis/tests/validation/dsl2gse.py b/pyspedas/themis/tests/validation/dsl2gse.py
index 0d28553c..6a9730ed 100644
--- a/pyspedas/themis/tests/validation/dsl2gse.py
+++ b/pyspedas/themis/tests/validation/dsl2gse.py
@@ -13,7 +13,7 @@
thm_vars = pyspedas.themis.fgm(probe='a', level='l2')
thm_vars = pyspedas.themis.state(probe='a', get_support_data=True, varnames=['tha_spinras', 'tha_spindec'])
-dsl2gse('tha_fgs_dsl', 'tha_spinras', 'tha_spindec', 'tha_fgs_gse_cotrans')
+dsl2gse('tha_fgs_dsl', 'tha_fgs_gse_cotrans')
data = get_data('tha_fgs_gse_cotrans')
diff --git a/pyspedas/themis/tests/validation/fgm.py b/pyspedas/themis/tests/validation/fgm.py
deleted file mode 100644
index f1c8dfae..00000000
--- a/pyspedas/themis/tests/validation/fgm.py
+++ /dev/null
@@ -1,25 +0,0 @@
-'''
-
-This script loads and prints FGM data at several data points.
-
-This is meant to be called from the IDL test suite for comparison with the data loaded via IDL SPEDAS
-
-'''
-
-
-import pyspedas
-from pytplot import get_data
-
-thm_vars = pyspedas.themis.fgm(probe='c', level='l2')
-
-data = get_data('thc_fgs_dsl')
-
-print(data[0][0:10].round(6).tolist())
-
-print(data[1][1000].tolist())
-
-print(data[1][5000].tolist())
-
-print(data[1][10000].tolist())
-
-print(data[1][20000].tolist())
diff --git a/pyspedas/twins/__init__.py b/pyspedas/twins/__init__.py
index b3346846..8bf949c5 100644
--- a/pyspedas/twins/__init__.py
+++ b/pyspedas/twins/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def ephemeris(trange=['2018-11-5', '2018-11-6'],
probe='1',
@@ -183,3 +184,7 @@ def imager(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='imager', trange=trange, probe=probe, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='TWINS', instrument=instrument, label=label)
diff --git a/pyspedas/twins/load.py b/pyspedas/twins/load.py
index 29594f13..18d7a3c7 100644
--- a/pyspedas/twins/load.py
+++ b/pyspedas/twins/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/twins/tests/tests.py b/pyspedas/twins/tests/tests.py
index 22abfde5..f02c8a4b 100644
--- a/pyspedas/twins/tests/tests.py
+++ b/pyspedas/twins/tests/tests.py
@@ -1,10 +1,9 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_downloadonly(self):
files = pyspedas.twins.imager(downloadonly=True)
@@ -13,6 +12,8 @@ def test_downloadonly(self):
def test_load_img_data(self):
img_vars = pyspedas.twins.imager()
self.assertTrue(data_exists('smooth_image_val'))
+ img_vars = pyspedas.twins.imager(notplot=True)
+ self.assertTrue('smooth_image_val' in img_vars)
def test_load_lad_data(self):
lad_vars = pyspedas.twins.lad(time_clip=True)
@@ -23,5 +24,6 @@ def test_load_ephem_data(self):
ephemeris_vars = pyspedas.twins.ephemeris()
self.assertTrue(data_exists('FLTGEO'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/ulysses/__init__.py b/pyspedas/ulysses/__init__.py
index 5061a9a1..72943932 100644
--- a/pyspedas/ulysses/__init__.py
+++ b/pyspedas/ulysses/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def vhm(trange=['2009-01-01', '2009-01-02'],
datatype='1min',
@@ -504,3 +505,7 @@ def grb(trange=['2003-01-01', '2003-01-02'],
"""
return load(instrument='grb', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Ulysses', instrument=instrument, label=label)
diff --git a/pyspedas/ulysses/load.py b/pyspedas/ulysses/load.py
index b5cc74e9..d14978a7 100644
--- a/pyspedas/ulysses/load.py
+++ b/pyspedas/ulysses/load.py
@@ -1,6 +1,6 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
diff --git a/pyspedas/ulysses/tests/tests.py b/pyspedas/ulysses/tests/tests.py
index 3ee943df..baff314d 100644
--- a/pyspedas/ulysses/tests/tests.py
+++ b/pyspedas/ulysses/tests/tests.py
@@ -1,20 +1,32 @@
-
import os
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
+ def test_load_cospin_data(self):
+ data = pyspedas.ulysses.cospin()
+ self.assertTrue(data_exists('Protons'))
+ self.assertTrue(data_exists('Electrons'))
+ self.assertTrue(data_exists('HiE_protons'))
+ self.assertTrue(data_exists('Z_ge_3'))
+
def test_load_vhm_data(self):
data = pyspedas.ulysses.vhm()
self.assertTrue(data_exists('B_MAG'))
+ data = pyspedas.ulysses.vhm(notplot=True)
+ self.assertTrue('B_MAG' in data)
def test_load_swoops_data(self):
data = pyspedas.ulysses.swoops()
self.assertTrue(data_exists('Density'))
self.assertTrue(data_exists('Temperature'))
self.assertTrue(data_exists('Velocity'))
+ data = pyspedas.ulysses.swoops(datatype='proton-moments_swoops')
+ self.assertTrue(data_exists('Tpar'))
+ self.assertTrue(data_exists('Tper'))
+ self.assertTrue(data_exists('dens'))
def test_load_swics_data(self):
data = pyspedas.ulysses.swics()
@@ -36,5 +48,6 @@ def test_downloadonly(self):
files = pyspedas.ulysses.urap(downloadonly=True, trange=['2003-01-01', '2003-01-02'])
self.assertTrue(os.path.exists(files[0]))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/pyspedas/utilities/dailynames.py b/pyspedas/utilities/dailynames.py
index e7856152..309dd335 100644
--- a/pyspedas/utilities/dailynames.py
+++ b/pyspedas/utilities/dailynames.py
@@ -1,11 +1,18 @@
-
-
+import logging
import numpy as np
from pyspedas import time_string, time_double
-from datetime import datetime
-
-def dailynames(directory='', trange=None, res=24*3600., hour_res=False, file_format='%Y%m%d', prefix='', suffix=''):
- '''
+from datetime import datetime, timezone
+
+
+def dailynames(directory='',
+ trange=None,
+ res=24*3600.,
+ hour_res=False,
+ file_format='%Y%m%d',
+ prefix='',
+ suffix='',
+ return_times=False):
+ """
Creates a list of file names using a time range, resolution and file format
Based on Davin Larson's file_dailynames in IDL SPEDAS
@@ -32,18 +39,19 @@ def dailynames(directory='', trange=None, res=24*3600., hour_res=False, file_for
Returns:
List containing filenames
- '''
+ """
if trange is None:
- print('No trange specified')
+ logging.error('No trange specified')
return
- if hour_res == True:
+ if hour_res:
res = 3600.
file_format = '%Y%m%d%H'
# allows the user to pass in trange as list of datetime objects
if type(trange[0]) == datetime and type(trange[1]) == datetime:
- trange = [time_string(trange[0].timestamp()), time_string(trange[1].timestamp())]
+ trange = [time_string(trange[0].replace(tzinfo=timezone.utc).timestamp()),
+ time_string(trange[1].replace(tzinfo=timezone.utc).timestamp())]
tr = [trange[0], trange[1]]
@@ -62,6 +70,9 @@ def dailynames(directory='', trange=None, res=24*3600., hour_res=False, file_for
times = [(float(num)+mmtr[0])*res for num in range(n)]
+ if return_times:
+ return times
+
dates = []
files = []
@@ -72,4 +83,4 @@ def dailynames(directory='', trange=None, res=24*3600., hour_res=False, file_for
for date in dates:
files.append(directory + prefix + date + suffix)
- return files
\ No newline at end of file
+ return files
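+
+# Example (sketch): with the defaults (daily resolution, '%Y%m%d' format),
+# dailynames(trange=['2020-01-01', '2020-01-03']) should return
+# ['20200101', '20200102']; with return_times=True the matching start times
+# are returned as unix doubles instead of file names.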
diff --git a/pyspedas/utilities/data_exists.py b/pyspedas/utilities/data_exists.py
index b3b2089e..f50983b8 100644
--- a/pyspedas/utilities/data_exists.py
+++ b/pyspedas/utilities/data_exists.py
@@ -1,14 +1,10 @@
-
-import numpy as np
import pytplot
+import logging
def data_exists(tvar):
"""
Checks if a tplot variable exists
"""
- if tvar in pytplot.data_quants.keys():
- data = pytplot.get_data(tvar)
- # multi-dimensional data returns a tuple, NRV variables return an ndarray
- if isinstance(data, tuple) or isinstance(data, np.ndarray) or isinstance(data, str) or isinstance(data, list):
- return True
- return False
+ logging.info("data_exists has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.data_exists(tvar=tvar)
diff --git a/pyspedas/utilities/datasets.py b/pyspedas/utilities/datasets.py
new file mode 100644
index 00000000..841366b0
--- /dev/null
+++ b/pyspedas/utilities/datasets.py
@@ -0,0 +1,14 @@
+from cdasws import CdasWs
+
+
+def find_datasets(mission=None, instrument=None, label=False):
+ cdas = CdasWs()
+ datasets = cdas.get_datasets(observatoryGroup=mission)
+ for index, dataset in enumerate(datasets):
+ if instrument is not None:
+ if instrument.upper() not in dataset['Id']:
+ continue
+ if label:
+ print(dataset['Id'] + ': ' + dataset['Label'])
+ else:
+ print(dataset['Id'])
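+
+
+# Usage sketch; the instrument value is an assumed example, and results are
+# printed to stdout rather than returned:
+# from pyspedas.utilities.datasets import find_datasets
+# find_datasets(mission='Wind', instrument='MFI', label=True)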
diff --git a/pyspedas/utilities/download.py b/pyspedas/utilities/download.py
index ba34581f..d6c37687 100644
--- a/pyspedas/utilities/download.py
+++ b/pyspedas/utilities/download.py
@@ -11,6 +11,8 @@
from shutil import copyfileobj, copy
from tempfile import NamedTemporaryFile
from html.parser import HTMLParser
+from netCDF4 import Dataset
+from cdflib import CDF
# the following is used to parse the links from an HTML index file
@@ -27,7 +29,40 @@ def handle_starttag(self, tag, attrs):
self.links.append((link))
except AttributeError:
self.links = [(link)]
-
+
+
+def check_downloaded_file(filename):
+ """
+ Check if a file exists and if it can be opened (for CDF and netCDF files).
+
+ If the file exists but it is not CDF or netCDF, it returns True without trying to open the file.
+ """
+ result = False
+ fpath = Path(filename)
+ if fpath.is_file() and len(filename) > 3:
+ if filename[-4:] == '.cdf':
+ # Try to open the cdf file
+ try:
+ cdf_file = CDF(filename)
+ result = True
+ except Exception:
+ logging.info("Cannot open CDF file: " + filename)
+ result = False
+ elif filename[-3:] == '.nc':
+ # Try to open the netCDF file
+ try:
+ netcdf_file = Dataset(filename)
+ result = True
+ except Exception:
+ logging.info("Cannot open netCDF file: " + filename)
+ result = False
+ else:
+ # The file is not CDF or netCDF; log a message and return True
+ logging.info("The file is not CDF or netCDF. Filename: " + filename)
+ result = True
+
+ return result
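+
+# Sanity-check sketch (the file name here is hypothetical):
+# check_downloaded_file('wi_h0_mfi_20131105_v05.cdf')  # True only if the CDF opens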
+
def download_file(url=None,
filename=None,
@@ -36,11 +71,11 @@ def download_file(url=None,
password=None,
verify=False,
session=None,
- basic_auth=False
- ):
+ basic_auth=False,
+ nbr_tries=0):
"""
Download a file and return its local path; this function is primarily meant to be called by the download function
-
+
Parameters:
url: str
Remote URL to download
@@ -63,14 +98,22 @@ def download_file(url=None,
session: requests.Session object
Requests session object that allows you to persist things like HTTP authentication through multiple calls
+ nbr_tries: int
+ Counts how many times we tried to download the file. Default is 0.
+
+ Notes:
+ Checks if the CDF or netCDF file can be opened, and if it can't, tries to download the file for a second time.
+
Returns:
String containing the local file name
"""
+ headers_original = headers
+ session_original = session
if session is None:
session = requests.Session()
-
+
if username is not None:
session.auth = requests.auth.HTTPDigestAuth(username, password)
@@ -91,48 +134,77 @@ def download_file(url=None,
if headers.get('If-Modified-Since') is not None:
del headers['If-Modified-Since']
- # the file hasn't changed
+ needs_to_download_file = False
if fsrc.status_code == 304:
+ # the file hasn't changed
logging.info('File is current: ' + filename)
fsrc.close()
- return filename
-
- # file not found
- if fsrc.status_code == 404:
+ elif fsrc.status_code == 404:
+ # file not found
logging.error('Remote file not found: ' + url)
fsrc.close()
return None
-
- # authentication issues
- if fsrc.status_code == 401 or fsrc.status_code == 403:
+ elif fsrc.status_code == 401 or fsrc.status_code == 403:
+ # authentication issues
logging.error('Unauthorized: ' + url)
fsrc.close()
return None
-
- if fsrc.status_code == 200:
+ elif fsrc.status_code == 200:
+ # this is the main download case
+ needs_to_download_file = True
logging.info('Downloading ' + url + ' to ' + filename)
else:
+ # all other problems
logging.error(fsrc.reason)
fsrc.close()
return None
- ftmp = NamedTemporaryFile(delete=False)
+ if needs_to_download_file:
+ ftmp = NamedTemporaryFile(delete=False)
- with open(ftmp.name, 'wb') as f:
- copyfileobj(fsrc.raw, f)
+ with open(ftmp.name, 'wb') as f:
+ copyfileobj(fsrc.raw, f)
- # make sure the directory exists
- if not os.path.exists(os.path.dirname(filename)) and os.path.dirname(filename) != '':
- os.makedirs(os.path.dirname(filename))
+ # make sure the directory exists
+ if not os.path.exists(os.path.dirname(filename)) and os.path.dirname(filename) != '':
+ os.makedirs(os.path.dirname(filename))
- # if the download was successful, copy to data directory
- copy(ftmp.name, filename)
+ # if the download was successful, copy to data directory
+ copy(ftmp.name, filename)
- fsrc.close()
- ftmp.close()
- os.unlink(ftmp.name) # delete the temporary file
-
- logging.info('Download complete: ' + filename)
+ fsrc.close()
+ ftmp.close()
+ os.unlink(ftmp.name) # delete the temporary file
+
+ logging.info('Download complete: ' + filename)
+
+ # At this point, we check if the file can be opened.
+ # If it cannot be opened, we delete the file and try again.
+ if nbr_tries == 0 and not check_downloaded_file(filename):
+ nbr_tries = 1
+ logging.info('There was a problem with the file: ' + filename)
+ logging.info('We are going to download it for a second time.')
+ if os.path.exists(filename):
+ os.unlink(filename)
+
+ download_file(url=url,
+ filename=filename,
+ headers=headers_original,
+ username=username,
+ password=password,
+ verify=verify,
+ session=session_original,
+ basic_auth=basic_auth,
+ nbr_tries=nbr_tries)
+
+ # If the file again cannot be opened, we give up.
+ if nbr_tries > 0 and not check_downloaded_file(filename):
+ nbr_tries = 2
+ logging.info('Tried twice. There was a problem with the file: ' + filename)
+ logging.info('File will be removed. Try to download it again at a later time.')
+ if os.path.exists(filename):
+ os.unlink(filename)
+ filename = None
return filename
@@ -149,7 +221,8 @@ def download(remote_path='',
no_download=False,
last_version=False,
basic_auth=False,
- regex=False):
+ regex=False,
+ no_wildcards=False):
"""
Download one or more remote files and return their local paths.
@@ -189,13 +262,16 @@ def download(remote_path='',
Flag to not download remote files
last_version: bool
- Flag to only download the last in file in a lexically sorted
+ Flag to only download the last file in a lexically sorted
list when multiple matches are found using wildcards
regex: bool
Flag to allow regular expressions in the file name matching,
instead of unix style matching
+ no_wildcards: bool
+ Flag to assume no wildcards in the requested url/filename
+
Returns:
String list specifying the full local path to all requested files
@@ -258,9 +334,9 @@ def download(remote_path='',
short_path = local_file[:1+local_file.rfind("/")]
- if no_download is False:
+ if not no_download:
# expand the wildcards in the url
- if '?' in url or '*' in url or regex and no_download is False:
+ if ('?' in url or '*' in url or regex) and (not no_download and not no_wildcards):
if index_table.get(url_base) is not None:
links = index_table[url_base]
else:
@@ -269,10 +345,13 @@ def download(remote_path='',
# we'll need to parse the HTML index file for the file list
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=ResourceWarning)
- if not basic_auth:
- html_index = session.get(url_base, verify=verify, headers=headers)
- else:
- html_index = session.get(url_base, verify=verify, headers=headers, auth=(username, password))
+ try:
+ if not basic_auth:
+ html_index = session.get(url_base, verify=verify, headers=headers)
+ else:
+ html_index = session.get(url_base, verify=verify, headers=headers, auth=(username, password))
+ except requests.exceptions.ConnectionError:
+ continue
if html_index.status_code == 404:
logging.error('Remote index not found: ' + url_base)
@@ -300,6 +379,9 @@ def download(remote_path='',
reg_expression = re.compile(url_file)
new_links = list(filter(reg_expression.match, links))
+ if len(new_links) == 0:
+ logging.info("No links matching pattern %s found at remote index %s", url_file, url_base)
+
if last_version and len(new_links) > 1:
new_links = sorted(new_links)
new_links = [new_links[-1]]
@@ -320,7 +402,8 @@ def download(remote_path='',
continue
resp_data = download_file(url=url, filename=filename, username=username, password=password, verify=verify,
headers=headers, session=session, basic_auth=basic_auth)
-
+
+
if resp_data is not None:
if not isinstance(resp_data, list):
resp_data = [resp_data]
@@ -329,7 +412,7 @@ def download(remote_path='',
else:
# download wasn't successful, search for local files
logging.info('Searching for local files...')
-
+
if local_path == '':
local_path_to_search = str(Path('.').resolve())
else:
diff --git a/pyspedas/utilities/interpol.py b/pyspedas/utilities/interpol.py
index e88a43a1..7a576064 100644
--- a/pyspedas/utilities/interpol.py
+++ b/pyspedas/utilities/interpol.py
@@ -1,7 +1,7 @@
import numpy as np
from scipy import interpolate
-def interpol(data, data_times, out_times):
+def interpol(data, data_times, out_times, fill_value="extrapolate"):
'''
Simple wrapper around scipy's interp1d that allows you to linearly interpolate data from
one set of times to another set of times
@@ -14,9 +14,9 @@ def interpol(data, data_times, out_times):
if len(data.shape) == 2:
out = np.empty((len(out_times), len(data[0, :])))
for data_idx in np.arange(len(data[0, :])):
- interpfunc = interpolate.interp1d(data_times, data[:, data_idx], kind='linear', bounds_error=False, fill_value='extrapolate')
+ interpfunc = interpolate.interp1d(data_times, data[:, data_idx], kind='linear', bounds_error=False, fill_value=fill_value)
out[:, data_idx] = interpfunc(out_times)
return out
else:
- interpfunc = interpolate.interp1d(data_times, data, kind='linear', bounds_error=False, fill_value='extrapolate')
- return interpfunc(out_times)
\ No newline at end of file
+ interpfunc = interpolate.interp1d(data_times, data, kind='linear', bounds_error=False, fill_value=fill_value)
+ return interpfunc(out_times)
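+
+# Usage sketch for the new fill_value parameter; np.nan is shown as an
+# assumed alternative to the default extrapolation:
+# out = interpol(data, data_times, out_times, fill_value=np.nan)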
diff --git a/pyspedas/utilities/leap_seconds.py b/pyspedas/utilities/leap_seconds.py
new file mode 100644
index 00000000..eebe797b
--- /dev/null
+++ b/pyspedas/utilities/leap_seconds.py
@@ -0,0 +1,55 @@
+import os
+import datetime
+import pandas as pd
+import numpy as np
+from pyspedas.utilities.download import download
+from pyspedas import time_double
+
+
+def load_leap_table(reload=False):
+ """
+ Loads the leap second table for converting TAI to unix times
+
+ Parameters
+ -----------
+ reload: bool
+ Re-load the leap second table, even if it exists locally.
+
+ This shouldn't be needed until at least 2035:
+ https://www.scientificamerican.com/article/the-leap-seconds-time-is-up-world-votes-to-stop-pausing-clocks/
+
+ Returns
+ ---------
+ dict containing 'dates' with array of Julian dates corresponding
+ to the leap seconds in the 'leaps' array
+
+ """
+ if os.environ.get('CDF_LEAPSECONDSTABLE') is not None:
+ table_file = os.environ.get('CDF_LEAPSECONDSTABLE')
+ elif os.environ.get('SPEDAS_DATA_DIR') is not None:
+ table_file = os.path.join(os.environ.get('SPEDAS_DATA_DIR'), 'CDFLeapSeconds.txt')
+ else:
+ table_file = os.path.join('data', 'CDFLeapSeconds.txt')
+
+ table_dir = os.path.dirname(table_file)
+
+ if reload or not os.path.exists(table_file):
+ downloaded = download(remote_path='https://cdf.gsfc.nasa.gov/html/',
+ remote_file='CDFLeapSeconds.txt',
+ local_path=table_dir)
+
+ cols = ['Year', 'Month', 'Day', 'LS', 'Drift']
+ table = pd.read_csv(table_file,
+ delim_whitespace=True,
+ dtype=str,
+ names=cols,
+ comment=';',
+ skipinitialspace=True,
+ index_col=False)
+
+ leap_dates = table['Year'].to_numpy() + '-' + table['Month'].to_numpy() + '-' + table['Day'].to_numpy()
+ leap_dates = time_double(leap_dates)
+ juls = np.array(leap_dates)/86400.0 + datetime.date(1970, 1, 1).toordinal() + 1721424.5
+
+ return {'leaps': np.float64(table['LS'].to_numpy()),
+ 'juls': juls}
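+
+
+# Usage sketch (the first call downloads CDFLeapSeconds.txt if not cached):
+# table = load_leap_table()
+# print(table['leaps'][-1])  # most recent TAI-UTC offset (37.0 as of 2017)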
diff --git a/pyspedas/utilities/spice/__init__.py b/pyspedas/utilities/spice/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/utilities/spice/time_ephemeris.py b/pyspedas/utilities/spice/time_ephemeris.py
new file mode 100644
index 00000000..28033d4c
--- /dev/null
+++ b/pyspedas/utilities/spice/time_ephemeris.py
@@ -0,0 +1,45 @@
+import datetime
+import logging
+import numpy as np
+from pyspedas import time_double
+from pyspedas.utilities.interpol import interpol
+
+
+def time_ephemeris(t, et2ut=False):
+ """
+ Purpose: conversion between unix time and ephemeris time
+ Usage: et = time_ephemeris(ut) ; Converts from UT (unix/posix time) to ephemeris time
+ Or: ut = time_ephemeris(et, et2ut=True) ; Converts from ephemeris time to UT double precision (UNIX time)
+ Warning: this routine is only accurate to about 1 millisecond and does not attempt to reflect GR effects
+
+ Based on the SPEDAS IDL routine by Davin Larson
+ """
+ if not isinstance(t, float):
+ t = time_double(t)
+
+ ls_utimes = time_double(
+ ['0200-1-1', '1972-1-1', '1972-7-1', '1973-1-1', '1974-1-1', '1975-1-1', '1976-1-1', '1977-1-1', '1978-1-1',
+ '1979-1-1', '1980-1-1', '1981-7-1', '1982-7-1', '1983-7-1', '1985-7-1', '1988-1-1', '1990-1-1', '1991-1-1', '1992-7-1', '1993-7-1', '1994-7-1',
+ '1996-1-1', '1997-7-1', '1999-1-1', '2006-1-1', '2009-1-1', '2012-7-1', '2015-7-1', '2017-1-1', '3000-1-1'])
+
+ ls_num = np.arange(len(ls_utimes)) + 9
+ utc_et_diff = time_double('2000-1-1/12:00:00') - 32.184
+ ls_etimes = ls_utimes + ls_num - utc_et_diff
+ disable_time = time_double('2023-7-1') # time of next possible leap second
+
+ if time_double() > disable_time - 30*86400.0:
+ logging.warning('Warning: This procedure must be modified before ' + str(disable_time) + ' to account for potential leap second')
+
+ if time_double() > disable_time:
+ raise ValueError('Sorry! This procedure has been disabled because it was not modified to account for a possible leap second on ' + str(disable_time))
+
+ if time_double() > disable_time - 7*86400.0:
+ logging.warning('Warning: This procedure must be modified before ' + str(disable_time) + ' to account for potential leap second at that time.')
+
+ if et2ut:
+ return t - np.floor(interpol(ls_num, ls_etimes, t)) + utc_et_diff
+
+ ut = time_double(t)
+
+ return ut + np.floor(interpol(ls_num, ls_utimes, ut)) - utc_et_diff
+
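+# Round-trip sketch (the module raises once the 2023-07-01 disable date has
+# passed without an update):
+# et = time_ephemeris(time_double('2020-01-01'))
+# ut = time_ephemeris(et, et2ut=True)  # recovers the unix time to ~1 ms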
diff --git a/pyspedas/utilities/tcopy.py b/pyspedas/utilities/tcopy.py
index 7d297025..af606bd5 100644
--- a/pyspedas/utilities/tcopy.py
+++ b/pyspedas/utilities/tcopy.py
@@ -6,6 +6,7 @@
Allowed wildcards are ? for a single character, * from multiple characters.
"""
+import logging
import pytplot
import pyspedas
import copy
@@ -32,7 +33,7 @@ def tcopy_one(name_in, name_out):
tvar_new = copy.deepcopy(tvar_old)
tvar_new.name = name_out
pytplot.data_quants.update({name_out: tvar_new})
- print(name_in + ' copied to ' + name_out)
+ logging.info(name_in + ' copied to ' + name_out)
def tcopy(names_in, names_out=None, suffix=None):
@@ -55,7 +56,7 @@ def tcopy(names_in, names_out=None, suffix=None):
"""
names_in = pyspedas.tnames(names_in)
if len(names_in) < 1:
- print('tcopy error: No pytplot variables found.')
+ logging.error('tcopy error: No pytplot variables found.')
return
if suffix is None:
@@ -68,7 +69,7 @@ def tcopy(names_in, names_out=None, suffix=None):
names_out = [names_out]
if len(names_in) != len(names_out):
- print('tcopy error: List with the names_in does not match list\
+ logging.error('tcopy error: List with the names_in does not match list\
with the names out.')
return
@@ -78,4 +79,4 @@ def tcopy(names_in, names_out=None, suffix=None):
if len(pyspedas.tnames(n)) == 1:
tcopy_one(n, o)
else:
- print('tplot name not found: ' + n)
+ logging.error('tplot name not found: ' + n)
diff --git a/pyspedas/utilities/tests/download_tests.py b/pyspedas/utilities/tests/download_tests.py
index 44e8e7f7..ffd0475f 100644
--- a/pyspedas/utilities/tests/download_tests.py
+++ b/pyspedas/utilities/tests/download_tests.py
@@ -17,6 +17,30 @@ def test_remote_file(self):
self.assertTrue(len(files) == 1)
self.assertTrue(files[0] == os.path.join(os.getcwd(), 'psp_swp_spc_l3i_20190401_v01.cdf'))
+ def test_wildcard(self):
+ # Test a wildcard pattern with several matches
+ files = download(remote_path='http://themis.ssl.berkeley.edu/data/themis/tha/l1/state/2008/tha_l1_state_20080323_v??.cdf')
+ self.assertTrue(len(files) == 4) # v00, v01, v02, v03 should be available on this date
+ self.assertTrue(files[3] == os.path.join(os.getcwd(), 'tha_l1_state_20080323_v03.cdf'))
+
+ def test_missing_index(self):
+ # Test a wildcard pattern on a nonexistent directory
+ # This should warn "Remote index not found"
+ files = download(remote_path='http://themis.ssl.berkeley.edu/data/themis/tha/l1/state/2006/tha_l1_state_20060323_v??.cdf')
+ self.assertTrue(len(files) == 0)
+
+ def test_last_version(self):
+ # Test a wildcard pattern with several matches, and last_version=True, returning the final (lexicographic) value
+ files = download(remote_path='http://themis.ssl.berkeley.edu/data/themis/tha/l1/state/2008/tha_l1_state_20080323_v??.cdf',last_version=True)
+ self.assertTrue(len(files) == 1) # v00, v01, v02, v03 should be available on this date, should only return v03
+ self.assertTrue(files[0] == os.path.join(os.getcwd(), 'tha_l1_state_20080323_v03.cdf'))
+
+ def test_last_version_nomatch(self):
+ # Test a wildcard pattern that doesn't match anything.
+ # This should warn about no matching file found in the index.
+ files = download(remote_path='http://themis.ssl.berkeley.edu/data/themis/tha/l1/state/2008/tha_l1_state_20080332_v??.cdf',last_version=True)
+ self.assertTrue(len(files) == 0) # Nonexistent date, nothing should be found
+
def test_remote_path_file(self):
# specifying both remote_path and remote_file saves the files to the current working directory + the path specified in remote_file
files = download(remote_path='https://spdf.gsfc.nasa.gov/pub/data/', remote_file='psp/sweap/spc/l3/l3i/2019/psp_swp_spc_l3i_20190401_v01.cdf')
diff --git a/pyspedas/utilities/tests/misc_tests.py b/pyspedas/utilities/tests/misc_tests.py
index 8d5e6c5e..6c0b9aa2 100644
--- a/pyspedas/utilities/tests/misc_tests.py
+++ b/pyspedas/utilities/tests/misc_tests.py
@@ -3,11 +3,8 @@
from pyspedas.utilities.dailynames import dailynames
from pyspedas import tcopy
-from pyspedas.utilities.time_string import (time_string, time_datetime,
- time_string_one)
-from pyspedas.utilities.time_double import (time_float_one, time_float,
- time_double)
-from pytplot import get_data, store_data
+from pytplot import data_exists, tkm2re
+from pytplot import get_data, store_data, options
class UtilTestCases(unittest.TestCase):
@@ -39,7 +36,6 @@ def test_dailynames(self):
file_format='%M', res=600.) ==
['00', '10', '20', '30', '40', '50'])
-
def test_tcopy(self):
"""Test tcopy function."""
store_data('test', data={'x': [1, 2, 3], 'y': [5, 5, 5]})
@@ -55,6 +51,21 @@ def test_tcopy(self):
tcopy('doesnt exist', 'another-copy')
tcopy(['another-copy', 'test'], 'another-copy')
+ def test_tkm2re(self):
+ store_data('test', data={'x': [1, 2, 3], 'y': [5, 5, 5]})
+ options('test', 'ysubtitle', '[Re]')
+ # convert to km
+ tkm2re('test', km=True)
+ # convert back
+ tkm2re('test_km')
+ self.assertTrue(data_exists('test_km_re'))
+ nothing = tkm2re('doesnt_exist')
+ self.assertTrue(nothing is None)
+ tkm2re('test_km', newname='another_test_km')
+ self.assertTrue(data_exists('another_test_km'))
+ anerror = tkm2re('test_km', newname=['test1_km', 'test1_km'])
+ self.assertTrue(anerror is None)
+
if __name__ == '__main__':
unittest.main()
diff --git a/pyspedas/utilities/tests/time_tests.py b/pyspedas/utilities/tests/time_tests.py
index 238f27aa..85a56a24 100644
--- a/pyspedas/utilities/tests/time_tests.py
+++ b/pyspedas/utilities/tests/time_tests.py
@@ -1,15 +1,21 @@
import unittest
from datetime import datetime, timezone
-from pyspedas.utilities.time_string import time_string, time_datetime, time_string_one
-from pyspedas.utilities.time_double import time_float_one, time_float, time_double
+from pytplot import time_string, time_datetime
+from pytplot import time_float, time_double
+
class TimeTestCases(unittest.TestCase):
def test_time_datetime(self):
"""Test time_datetime function."""
+ now = time_datetime()
+ self.assertTrue(time_datetime('2015-12-15/00:00') == datetime(2015, 12, 15, 0, 0, tzinfo=timezone.utc))
self.assertTrue(time_datetime(1450137600.0000000) == datetime(2015, 12, 15, 0, 0, tzinfo=timezone.utc))
self.assertTrue([time_datetime(1450137600.0000000), time_datetime(1444953600.0000000)]
== [datetime(2015, 12, 15, 0, 0, tzinfo=timezone.utc), datetime(2015, 10, 16, 0, 0, tzinfo=timezone.utc)])
+ self.assertTrue(time_datetime([1450137600.0000000, 1444953600.0000000])
+ == [datetime(2015, 12, 15, 0, 0, tzinfo=timezone.utc), datetime(2015, 10, 16, 0, 0, tzinfo=timezone.utc)])
+
def test_time_string(self):
"""Test time_string function."""
diff --git a/pyspedas/utilities/time_double.py b/pyspedas/utilities/time_double.py
index f159da4b..ba72ec14 100644
--- a/pyspedas/utilities/time_double.py
+++ b/pyspedas/utilities/time_double.py
@@ -12,12 +12,8 @@
Similar to time_double.pro in IDL SPEDAS.
"""
-
-from dateutil import parser
-from datetime import datetime, timezone
-import numpy as np
-from collections.abc import Iterable
-
+import pytplot
+import logging
def time_float_one(s_time=None):
"""
@@ -35,21 +31,9 @@ def time_float_one(s_time=None):
Output time.
"""
- if s_time is None:
- s_time = str(datetime.now())
-
- if isinstance(s_time, (int, float, np.integer, np.float64)):
- return float(s_time)
-
- try:
- in_datetime = parser.isoparse(s_time)
- except ValueError:
- in_datetime = parser.parse(s_time)
-
- float_time = in_datetime.replace(tzinfo=timezone.utc).timestamp()
-
- return float_time
-
+ logging.info("time_float_one has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.time_float_one(s_time=s_time)
def time_float(str_time=None):
"""
@@ -66,20 +50,9 @@ def time_float(str_time=None):
Output times as floats.
"""
- if str_time is None:
- return time_float_one()
- else:
- if isinstance(str_time, str):
- return time_float_one(str_time)
- else:
- time_list = list()
- if isinstance(str_time, Iterable):
- for t in str_time:
- time_list.append(time_float_one(t))
- return time_list
- else:
- return time_float_one(str_time)
-
+ logging.info("time_float has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.time_float(str_time=str_time)
def time_double(str_time=None):
"""
@@ -98,4 +71,6 @@ def time_double(str_time=None):
Output times as floats.
"""
- return time_float(str_time)
+ logging.info("time_double has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.time_float(str_time=str_time)
\ No newline at end of file
diff --git a/pyspedas/utilities/time_string.py b/pyspedas/utilities/time_string.py
index 6b390ba4..a40b3063 100644
--- a/pyspedas/utilities/time_string.py
+++ b/pyspedas/utilities/time_string.py
@@ -11,8 +11,8 @@
Compare to https://www.epochconverter.com/
"""
-from datetime import datetime, timezone
-from pyspedas.utilities.time_double import time_float
+import logging
+import pytplot
def time_string_one(float_time=None, fmt=None):
"""
@@ -33,16 +33,9 @@ def time_string_one(float_time=None, fmt=None):
Datetime as string.
"""
- if fmt is None:
- fmt = '%Y-%m-%d %H:%M:%S.%f'
-
- if float_time is None:
- str_time = datetime.now().strftime(fmt)
- else:
- str_time = datetime.utcfromtimestamp(float_time).strftime(fmt)
-
- return str_time
-
+ logging.info("time_string_one has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.time_string_one(float_time=float_time,fmt=fmt)
def time_string(float_time=None, fmt=None):
"""
@@ -63,17 +56,9 @@ def time_string(float_time=None, fmt=None):
Datetimes as string.
"""
- if float_time is None:
- return time_string_one(None, fmt)
- else:
- if isinstance(float_time, (int, float)):
- return time_string_one(float_time, fmt)
- else:
- time_list = list()
- for t in float_time:
- time_list.append(time_string_one(t, fmt))
- return time_list
-
+ logging.info("time_string has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.time_string(float_time=float_time,fmt=fmt)
def time_datetime(time=None, tz=None):
"""Find python datetime.
@@ -93,16 +78,6 @@ def time_datetime(time=None, tz=None):
Datetimes as `datetime.datetime`.
"""
- if tz is None:
- tz = timezone.utc
-
- if time is None:
- return datetime.now()
-
- if isinstance(time, str):
- return time_datetime(time_float(time))
-
- if isinstance(time, (int, float)):
- return datetime.fromtimestamp(time, tz=tz)
-
- return [time_datetime(_time) for _time in time]
+ logging.info("time_datetime has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.time_datetime(time=time,tz=tz)
diff --git a/pyspedas/utilities/tkm2re.py b/pyspedas/utilities/tkm2re.py
index 1af1a935..31036fd7 100644
--- a/pyspedas/utilities/tkm2re.py
+++ b/pyspedas/utilities/tkm2re.py
@@ -1,7 +1,5 @@
-
-
+import logging
import pytplot
-from pyspedas import tnames
def tkm2re(name, km=False, newname=None, suffix=''):
"""
@@ -30,62 +28,6 @@ def tkm2re(name, km=False, newname=None, suffix=''):
List of the tplot variables created
"""
- km_in_re = 6371.2
-
- names = tnames(name)
-
- if names == []:
- print('No tplot variables found: ' + name)
- return
-
- if newname is None:
- newname = [n + suffix for n in names]
-
- if km == False:
- newname = [n + '_re' for n in newname]
- else:
- newname = [n + '_km' for n in newname]
- else:
- if not isinstance(newname, list):
- newname = [newname]
-
- if len(newname) != len(names):
- print('Number of output variable names (newname) should match the number of input variables.')
- return
-
- out = []
-
- for in_tvar, out_tvar in zip(names, newname):
- data = pytplot.get_data(in_tvar)
- metadata = pytplot.get_data(in_tvar, metadata=True)
-
- if data is None:
- print('Problem reading variable: ' + in_tvar)
- continue
-
- if km == False:
- data_out = data.y/km_in_re
- else:
- data_out = data.y*km_in_re
-
- saved = pytplot.store_data(out_tvar, data={'x': data.times, 'y': data_out}, attr_dict=metadata)
-
- if not saved:
- print('Problem creating tplot variable.')
- continue
-
- # update the subtitle, if needed
- yaxis_opt = pytplot.data_quants[out_tvar].attrs['plot_options'].get('yaxis_opt')
-
- if yaxis_opt is not None:
- subtitle = yaxis_opt.get('axis_subtitle')
- if subtitle is not None:
- if km == False:
- new_subtitle = pytplot.data_quants[out_tvar].attrs['plot_options']['yaxis_opt']['axis_subtitle'].lower().replace('km', 'Re')
- else:
- new_subtitle = pytplot.data_quants[out_tvar].attrs['plot_options']['yaxis_opt']['axis_subtitle'].lower().replace('re', 'km')
- pytplot.data_quants[out_tvar].attrs['plot_options']['yaxis_opt']['axis_subtitle'] = new_subtitle
-
- out.append(out_tvar)
-
- return out
\ No newline at end of file
+ logging.info("tkm2re has been moved to the pytplot.tplot_math module. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.tplot_math.tkm2re(name=name,km=km,newname=newname,suffix=suffix)
diff --git a/pyspedas/utilities/tnames.py b/pyspedas/utilities/tnames.py
index b96cae48..682c8c70 100644
--- a/pyspedas/utilities/tnames.py
+++ b/pyspedas/utilities/tnames.py
@@ -6,9 +6,8 @@
Allowed wildcards are ? for a single character, * from multiple characters.
"""
-import fnmatch
-from pytplot import tplot_names
-
+import logging
+import pytplot
def tnames(pattern=None):
"""
@@ -25,16 +24,6 @@ def tnames(pattern=None):
List of pytplot variables.
"""
- name_list = list()
- all_names = tplot_names(quiet=True)
-
- if pattern is None:
- name_list.extend(all_names)
- else:
- if isinstance(pattern, str):
- name_list.extend(fnmatch.filter(all_names, pattern))
- else:
- for p in pattern:
- name_list.extend(fnmatch.filter(all_names, p))
-
- return name_list
+ logging.info("tnames has been moved to the pytplot package. Please update your imports!")
+ logging.info("This version will eventually be removed.")
+ return pytplot.tnames(pattern)
diff --git a/pyspedas/version.py b/pyspedas/version.py
index bb0da3ff..d4c26dc6 100644
--- a/pyspedas/version.py
+++ b/pyspedas/version.py
@@ -1,4 +1,5 @@
"""Print the version number for the current installation."""
+import logging
def version():
@@ -12,4 +13,4 @@ def version():
"""
import pkg_resources
ver = pkg_resources.get_distribution("pyspedas").version
- print("pyspedas version: " + ver)
+ logging.info("pyspedas version: " + ver)
diff --git a/pyspedas/vires/__init__.py b/pyspedas/vires/__init__.py
new file mode 100644
index 00000000..2508d15f
--- /dev/null
+++ b/pyspedas/vires/__init__.py
@@ -0,0 +1,17 @@
+from pyspedas.vires.load import load
+
+
+def data(trange=None,
+ collection=None,
+ measurements=None,
+ models=None,
+ sampling_step=None,
+ auxiliaries=None,
+ residuals=False):
+ return load(trange=trange,
+ collection=collection,
+ measurements=measurements,
+ models=models,
+ sampling_step=sampling_step,
+ auxiliaries=auxiliaries,
+ residuals=residuals)
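+
+
+# Usage sketch; the collection and measurement names are assumed examples
+# from the VirES/Swarm documentation and are not verified here:
+# import pyspedas
+# mag_vars = pyspedas.vires.data(trange=['2014-01-01', '2014-01-02'],
+#                                collection='SW_OPER_MAGA_LR_1B',
+#                                measurements=['F', 'B_NEC'],
+#                                sampling_step='PT10S')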
diff --git a/pyspedas/vires/config.py b/pyspedas/vires/config.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyspedas/vires/load.py b/pyspedas/vires/load.py
new file mode 100644
index 00000000..1786174e
--- /dev/null
+++ b/pyspedas/vires/load.py
@@ -0,0 +1,65 @@
+import logging
+from viresclient import SwarmRequest
+from pyspedas import time_datetime
+from pytplot import store_data, options
+
+
+def load(trange=None,
+ collection=None,
+ measurements=None,
+ models=None,
+ sampling_step=None,
+ auxiliaries=None,
+ residuals=False):
+ """
+ Loads data from the VirES server (ESA Swarm products) into tplot variables.
+
+ This function is not meant to be called directly; instead, see the wrapper:
+ pyspedas.vires.data
+ """
+ if trange is None:
+ logging.error('No time range specified')
+ return
+
+ tr = time_datetime(trange)
+
+ if not isinstance(measurements, list):
+ measurements = [measurements]
+
+ if auxiliaries is not None and not isinstance(auxiliaries, list):
+ auxiliaries = [auxiliaries]
+
+ if models is not None:
+ if not isinstance(models, list):
+ models = [models]
+
+ request = SwarmRequest()
+ if isinstance(collection, list):
+ request.set_collection(*collection)
+ else:
+ request.set_collection(collection)
+
+ if auxiliaries is None:
+ request.set_products(measurements=measurements, models=models, sampling_step=sampling_step, residuals=residuals)
+ else:
+ request.set_products(measurements=measurements, auxiliaries=auxiliaries, models=models, sampling_step=sampling_step, residuals=residuals)
+
+ data = request.get_between(start_time=tr[0], end_time=tr[1])
+ return xarray_to_tplot(data.as_xarray())
+
+
+def xarray_to_tplot(xr):
+ out = []
+ for key in xr.keys():
+ times = xr[key].coords['Timestamp'].to_numpy()
+ saved = store_data(key, data={'x': times, 'y': xr[key].data})
+ options(key, 'ytitle', xr[key].description)
+ options(key, 'ysubtitle', '[' + xr[key].units + ']')
+
+ # find the legend if this is a vector
+ for item in xr[key].coords:
+ if item != 'Timestamp':
+ options(key, 'legend_names', xr[key].coords[item].values.tolist())
+
+ if saved:
+ out.append(key)
+ else:
+ logging.warning('Problem saving: ' + key)
+ return out
diff --git a/pyspedas/wind/__init__.py b/pyspedas/wind/__init__.py
index 75f1f7f4..a2a0d96c 100644
--- a/pyspedas/wind/__init__.py
+++ b/pyspedas/wind/__init__.py
@@ -1,5 +1,6 @@
-
from .load import load
+from pyspedas.utilities.datasets import find_datasets
+
def threedp(trange=['1999-11-5', '1999-11-6'],
datatype='3dp_emfits_e0',
@@ -10,7 +11,9 @@ def threedp(trange=['1999-11-5', '1999-11-6'],
downloadonly=False,
notplot=False,
no_update=False,
- time_clip=False):
+ berkeley=False,
+ time_clip=False,
+ addmaster=False):
"""
This function loads 3DP data
@@ -59,7 +62,7 @@ def threedp(trange=['1999-11-5', '1999-11-6'],
List of tplot variables created.
"""
- return load(instrument='3dp', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+ return load(instrument='3dp', berkeley=berkeley, addmaster=addmaster, trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
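+
+# Usage sketch mirroring the 3DP tests in this change: load '3dp_pm' moments
+# with a master CDF supplying the metadata (addmaster=True):
+# import pyspedas
+# tdp_vars = pyspedas.wind.threedp(trange=['2018-11-06', '2018-11-07'],
+#                                  datatype='3dp_pm', addmaster=True)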
def orbit(trange=['1999-11-5', '1999-11-6'],
datatype='pre_or',
@@ -360,3 +363,7 @@ def swe(trange=['2018-11-5', '2018-11-6'],
"""
return load(instrument='swe', trange=trange, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
+
+
+def datasets(instrument=None, label=True):
+ return find_datasets(mission='Wind', instrument=instrument, label=label)
diff --git a/pyspedas/wind/load.py b/pyspedas/wind/load.py
index 1b12efad..27d4ed56 100644
--- a/pyspedas/wind/load.py
+++ b/pyspedas/wind/load.py
@@ -1,13 +1,15 @@
+import os
from pyspedas.utilities.dailynames import dailynames
from pyspedas.utilities.download import download
-from pyspedas.analysis.time_clip import time_clip as tclip
+from pytplot import time_clip as tclip
from pytplot import cdf_to_tplot
from .config import CONFIG
def load(trange=['2013-11-5', '2013-11-6'],
instrument='fgm',
- datatype='h0',
+ datatype='h0',
+ prefix='',
suffix='',
get_support_data=False,
varformat=None,
@@ -15,7 +17,9 @@ def load(trange=['2013-11-5', '2013-11-6'],
downloadonly=False,
notplot=False,
no_update=False,
- time_clip=False):
+ berkeley=False,
+ time_clip=False,
+ addmaster=False):
"""
This function loads data from the WIND mission; this function is not meant
to be called directly; instead, see the wrappers:
@@ -28,38 +32,61 @@ def load(trange=['2013-11-5', '2013-11-6'],
"""
+ if berkeley:
+ remote_data_dir = 'http://themis.ssl.berkeley.edu/data/wind/'
+ else:
+ remote_data_dir = CONFIG['remote_data_dir']
+
+ local_master_dir = CONFIG['local_data_dir']+'wind_masters/'
+
+ masterpath = 'https://cdaweb.gsfc.nasa.gov/pub/software/cdawlib/0MASTERS/'
if instrument == 'fgm':
pathformat = 'mfi/mfi_'+datatype+'/%Y/wi_'+datatype+'_mfi_%Y%m%d_v??.cdf'
+ masterfile = 'wi_'+datatype+'_mfi_00000000_v01.cdf'
elif instrument == 'swe':
pathformat = 'swe/swe_'+datatype+'/%Y/wi_'+datatype+'_swe_%Y%m%d_v??.cdf'
+ masterfile = 'wi_'+datatype+'_swe_00000000_v01.cdf'
elif instrument == 'sms':
pathformat = 'sms/'+datatype+'/sms_'+datatype+'/%Y/wi_'+datatype+'_sms_%Y%m%d_v??.cdf'
+ masterfile = 'wi_' + datatype + '_sms_00000000_v01.cdf'
elif instrument == 'waves':
pathformat = 'waves/wav_'+datatype+'/%Y/wi_'+datatype+'_wav_%Y%m%d_v??.cdf'
+ masterfile = 'wi_' + datatype + '_waves_00000000_v01.cdf'
elif instrument == 'orbit':
pathformat = 'orbit/'+datatype+'/%Y/wi_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
+ masterfile = 'wi_' + datatype + '_orbit_00000000_v01.cdf'
elif instrument == '3dp':
+ prefix = 'wi_' + datatype + '_'
if datatype == '3dp_emfits_e0':
+ prefix = ''
pathformat = '3dp/'+datatype+'/%Y/wi_'+datatype.split('_')[1]+'_'+datatype.split('_')[2]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
+ masterfile = 'wi_' + datatype.split('_')[1]+'_'+datatype.split('_')[2] + '_3dp_00000000_v01.cdf'
else:
- pathformat = '3dp/'+datatype+'/%Y/wi_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
+ if not berkeley:
+ pathformat = '3dp/'+datatype+'/%Y/wi_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
+ masterfile = 'wi_' + datatype.split('_')[1]+'_'+datatype.split('_')[0] + '_00000000_v01.cdf'
+ else:
+ pathformat = '3dp/'+datatype+'/%Y/wi_'+datatype+'_3dp_%Y%m%d_v??.cdf'
+ masterfile = 'wi_' + datatype + '_3dp_00000000_v01.cdf'
# find the full remote path names using the trange
remote_names = dailynames(file_format=pathformat, trange=trange)
out_files = []
- files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=True)
- if files is not None:
- for file in files:
- out_files.append(file)
+ if addmaster:
+ mfile = download(remote_file=masterfile,remote_path=masterpath,local_path=local_master_dir,no_download=no_update,last_version=True)
+ else:
+ mfile = [None]
+
+ datafiles = download(remote_file=remote_names, remote_path=remote_data_dir, local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=True)
- out_files = sorted(out_files)
+ out_files.extend(datafiles)
if downloadonly:
return out_files
- tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
+ tvars = cdf_to_tplot(out_files, mastercdf=mfile[0], prefix=prefix, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
if notplot:
return tvars
diff --git a/pyspedas/wind/tests/tests.py b/pyspedas/wind/tests/tests.py
index 77cc8f3c..20e9dbdf 100644
--- a/pyspedas/wind/tests/tests.py
+++ b/pyspedas/wind/tests/tests.py
@@ -1,15 +1,109 @@
-
import os
+import logging
import unittest
-from pyspedas.utilities.data_exists import data_exists
-
+from pytplot import data_exists
import pyspedas
+
class LoadTestCases(unittest.TestCase):
def test_downloadonly(self):
files = pyspedas.wind.mfi(trange=['2013-11-5', '2013-11-6'], downloadonly=True)
self.assertTrue(os.path.exists(files[0]))
+ def test_load_3dp_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2003-09-5', '2003-09-6'], notplot=True)
+ self.assertTrue('N_e_dens_wi_3dp' in tdp_vars)
+
+ def test_load_3dp_pm_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2018-11-06', '2018-11-07'], datatype='3dp_pm', time_clip=True, notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_pm_P_VELS' in tdp_vars)
+
+ def test_load_3dp_ehpd_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2018-11-06', '2018-11-07'], datatype='3dp_ehpd', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_ehpd_FLUX' in tdp_vars)
+
+ def test_load_3dp_ehsp_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_ehsp', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_ehsp_FLUX' in tdp_vars)
+
+ def test_load_3dp_elm2_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_elm2', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_elm2_FLUX' in tdp_vars)
+
+ def test_load_3dp_elpd_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_elpd', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_elpd_FLUX' in tdp_vars)
+
+ def test_load_3dp_elsp_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_elsp', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_elsp_FLUX' in tdp_vars)
+
+ def test_load_3dp_em_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_em', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_em_E_DENS' in tdp_vars)
+
+ def test_load_3dp_emfits_e0_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2003-10-11', '2003-10-12'], datatype='3dp_emfits_e0', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('N_e_dens_wi_3dp' in tdp_vars)
+
+ def test_load_3dp_k0_data(self):
+ # Note: this datatype is periodically pruned, SPDF seems to keep the last 3 years worth.
+ tdp_vars = pyspedas.wind.threedp(trange=['2023-01-01', '2023-01-02'], datatype='3dp_k0', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_k0_elect_density' in tdp_vars)
+
+ def test_load_3dp_phsp_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_phsp', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_phsp_FLUX' in tdp_vars)
+ def test_varformat_star(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_phsp', time_clip=True,
+ notplot=True, addmaster=True, varformat='*')
+ self.assertTrue('wi_3dp_phsp_FLUX' in tdp_vars)
+
+ def test_varformat_dots(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_phsp', time_clip=True,
+ notplot=True, addmaster=True, varformat='....')
+ self.assertTrue('wi_3dp_phsp_FLUX' in tdp_vars)
+
+ def test_load_3dp_plsp_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_plsp', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_plsp_FLUX' in tdp_vars)
+
+ def test_load_3dp_varformat_alternation(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_plsp', time_clip=True,
+ notplot=True, addmaster=True, varformat=r'MOM\.P\.FLUX|MOM\.P\.VELOCITY|MOM\.P\.PTENS')
+ self.assertTrue('wi_3dp_plsp_MOM.P.FLUX' in tdp_vars)
+ self.assertTrue('wi_3dp_plsp_MOM.P.VELOCITY' in tdp_vars)
+ self.assertTrue('wi_3dp_plsp_MOM.P.PTENS' in tdp_vars)
+
+ def test_load_3dp_sfpd_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_sfpd', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_sfpd_FLUX' in tdp_vars)
+ def test_load_3dp_sfsp_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_sfsp', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_sfsp_FLUX' in tdp_vars)
+
+ def test_load_3dp_sopd_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_sopd', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_sopd_FLUX' in tdp_vars)
+
+ def test_load_3dp_sosp_data(self):
+ tdp_vars = pyspedas.wind.threedp(trange=['2019-11-06', '2019-11-07'], datatype='3dp_sosp', time_clip=True,
+ notplot=True, addmaster=True)
+ self.assertTrue('wi_3dp_sosp_FLUX' in tdp_vars)
+
def test_load_mfi_data(self):
mfi_vars = pyspedas.wind.mfi(trange=['2013-11-5', '2013-11-6'], time_clip=True)
self.assertTrue(data_exists('BGSE'))
@@ -41,5 +135,6 @@ def test_load_sms_data(self):
self.assertTrue(data_exists('C_ion_temp'))
self.assertTrue(data_exists('O_ion_temp'))
+
if __name__ == '__main__':
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/requirements.txt b/requirements.txt
index 7b6fcf31..0141b428 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,10 +1,11 @@
numpy>=1.19.5
requests
-cdflib>=0.4.3
-pytplot-mpl-temp>=2.0.8
+cdflib<1.0.0
+pytplot-mpl-temp>=2.1.47
cdasws>=1.7.24
netCDF4
pywavelets
astropy
-geopack>=1.0.9
+geopack>=1.0.10
hapiclient>=0.2.2
+viresclient
diff --git a/setup.py b/setup.py
index 7aa636d5..95e48919 100755
--- a/setup.py
+++ b/setup.py
@@ -7,8 +7,8 @@
setup(
name='pyspedas',
- version='1.3.20',
- description='Python Space Physics Environment Data Analysis Software (SPEDAS)',
+ version='1.4.44',
+ description='Python Space Physics Environment Data Analysis Software (pySPEDAS)',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
url='https://github.com/spedas/pyspedas',
@@ -25,10 +25,10 @@
project_urls={'Information': 'http://spedas.org/wiki/',
},
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
- install_requires=['numpy>=1.19.5', 'requests', 'geopack>=1.0.9',
- 'cdflib>=0.4.3', 'cdasws>=1.7.24', 'netCDF4',
+ install_requires=['numpy>=1.19.5', 'requests', 'geopack>=1.0.10',
+ 'cdflib<1.0.0', 'cdasws>=1.7.24', 'netCDF4>=1.6.2',
'pywavelets', 'astropy', 'hapiclient>=0.2.2',
- 'pytplot-mpl-temp>=2.0.8'],
- python_requires='>=3.7',
+ 'pytplot-mpl-temp>=2.1.47', 'viresclient'],
+ python_requires='>=3.8',
include_package_data=True,
)