diff --git a/.github/workflows/linting.yaml b/.github/workflows/linting.yaml new file mode 100644 index 00000000..d89171b7 --- /dev/null +++ b/.github/workflows/linting.yaml @@ -0,0 +1,16 @@ +name: code-style + +on: + push: + branches: 'main' + pull_request: + branches: '*' + +jobs: + linting: + name: 'pre-commit hooks' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.1 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..672078c9 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,38 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: debug-statements + - id: check-docstring-first + - id: check-json + + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black-jupyter + + - repo: https://github.com/asottile/reorder-python-imports + rev: v3.12.0 + hooks: + - id: reorder-python-imports + args: [--py38-plus, --add-import, 'from __future__ import annotations'] + + - repo: https://github.com/asottile/add-trailing-comma + rev: v3.1.0 + hooks: + - id: add-trailing-comma + + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.2 + hooks: + - id: pyupgrade + args: [--py38-plus] + + - repo: https://github.com/PyCQA/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + args: [--max-line-length=120] diff --git a/README.md b/README.md index 945d681d..d60a4ceb 100644 --- a/README.md +++ b/README.md @@ -64,10 +64,10 @@ or you can view `${CUPID_ROOT}/examples/coupled-model/computed_notebooks/quick-r Furthermore, to clear the `computed_notebooks` folder which was generated by the `cupid-run` and `cupid-build` commands, you can run the following command: ``` bash -$ cupid-clear +$ cupid-clear ``` -This will clear the `computed_notebooks` folder which is at the location pointed to by the `run_dir` variable in the `config.yml` file. +This will clear the `computed_notebooks` folder which is at the location pointed to by the `run_dir` variable in the `config.yml` file. ### CUPiD Options diff --git a/cupid/build.py b/cupid/build.py index d5778b84..1e80e269 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -5,8 +5,8 @@ The main function `build()` reads the configuration file (default config.yml), extracts the necessary information such as the name of the book and the -directory containing computed notebooks, and then proceeds to clean and build the -Jupyter book using the `jupyter-book` command-line tool. +directory containing computed notebooks, and then proceeds to clean and build +the Jupyter book using the `jupyter-book` command-line tool. 
Args: CONFIG_PATH: str, path to configuration file (default config.yml) @@ -14,10 +14,11 @@ Returns: None """ +from __future__ import annotations -import click import subprocess -import sys + +import click import yaml @@ -34,7 +35,7 @@ def build(config_path): None """ - with open(config_path, "r") as fid: + with open(config_path) as fid: control = yaml.safe_load(fid) sname = control["data_sources"]["sname"] @@ -42,14 +43,14 @@ def build(config_path): subprocess.run(["jupyter-book", "clean", f"{run_dir}/computed_notebooks/{sname}"]) subprocess.run( - ["jupyter-book", "build", f"{run_dir}/computed_notebooks/{sname}", "--all"] + ["jupyter-book", "build", f"{run_dir}/computed_notebooks/{sname}", "--all"], ) # Originally used this code to copy jupyter book HTML to a location to host it online - # if 'publish_location' in control: + # if "publish_location" in control: - # user = os.environ.get('USER') + # user = os.environ.get("USER") # remote_mach = control["publish_location"]["remote_mach"] # remote_dir = control["publish_location"]["remote_dir"] # this seems more complicated than expected...people have mentioned paramiko library? diff --git a/cupid/clear.py b/cupid/clear.py index ea886fd3..d72d1b7f 100755 --- a/cupid/clear.py +++ b/cupid/clear.py @@ -1,24 +1,27 @@ #!/usr/bin/env python """ -This script provides functionality to clear the contents of the 'computed_notebooks' folder -at the location specified by the 'run_dir' variable in the CONFIG_PATH. +This script provides functionality to clear the contents of the "computed_notebooks" folder +at the location specified by the "run_dir" variable in the CONFIG_PATH. The main function `clear()` takes the path to the configuration file as input, reads the config file -to obtain the 'run_dir' variable, and then deletes the contents of the 'computed_notebooks' folder +to obtain the "run_dir" variable, and then deletes the contents of the "computed_notebooks" folder at that location. """ +from __future__ import annotations import os import shutil + import click + import cupid.util def read_config_file(config_path): """ Given the file path to the configuration file, this function reads the config file content and - returns the val of the run_dir string with '/computed_notebooks' appended to it + returns the val of the run_dir string with `/computed_notebooks` appended to it Args: CONFIG_PATH: str, path to configuration file (default config.yml) @@ -31,11 +34,11 @@ def read_config_file(config_path): run_dir = control["data_sources"].get("run_dir", None) if run_dir: - # Append '/computed_notebooks' to the run_dir value if it is not empty + # Append `/computed_notebooks` to the run_dir value if it is not empty full_path = os.path.join(run_dir, "computed_notebooks") return full_path - # else run_dir is empty/wasn't found in config file so return error + # else run_dir is empty/was not found in config file so return error raise ValueError("'run_dir' was empty/not found in the config file.") @@ -43,14 +46,14 @@ def read_config_file(config_path): @click.argument("config_path", default="config.yml") # Entry point to this script def clear(config_path): - """Clears the contents of the 'computed_notebooks' folder at the location - specified by the 'run_dir' variable in the CONFIG_PATH. + """Clears the contents of the "computed_notebooks" folder at the location + specified by the "run_dir" variable in the CONFIG_PATH. Args: CONFIG_PATH - The path to the configuration file. 
""" run_dir = read_config_file(config_path) - # Delete the 'computed_notebooks' folder and all the contents inside of it + # Delete the "computed_notebooks" folder and all the contents inside of it shutil.rmtree(run_dir) print(f"All contents in {run_dir} have been cleared.") diff --git a/cupid/quickstart.py b/cupid/quickstart.py index 8c178641..045d6f37 100644 --- a/cupid/quickstart.py +++ b/cupid/quickstart.py @@ -1,3 +1,4 @@ -### To be created: a script, maybe called through a command line entry point, -### that sets up a directory with a config.yml file and -### basics necessary to set up a notebook collection +# To be created: a script, maybe called through a command line entry point, +# that sets up a directory with a config.yml file and +# basics necessary to set up a notebook collection +from __future__ import annotations diff --git a/cupid/read.py b/cupid/read.py index 03ec029d..16af2bf9 100644 --- a/cupid/read.py +++ b/cupid/read.py @@ -6,6 +6,7 @@ - get_collection(path_to_catalog, **kwargs): Get a collection of datasets from an intake catalog based on specified criteria. """ +from __future__ import annotations import intake import yaml @@ -21,21 +22,22 @@ def read_yaml(path_to_yaml): def get_collection(path_to_catalog, **kwargs): """Get collection of datasets from intake catalog""" cat = intake.open_esm_datastore(path_to_catalog) - ### note that the json file points to the csv, so the path that the - ### yaml file contains doesn't actually get used. this can cause issues + # note that the json file points to the csv, so the path that the + # yaml file contains does not actually get used. this can cause issues cat_subset = cat.search(**kwargs) if "variable" in kwargs.keys(): # pylint: disable=invalid-name def preprocess(ds): - ## the double brackets return a Dataset rather than a DataArray - ## this is fragile and could cause issues, not sure what subsetting on time_bound does + # the double brackets return a Dataset rather than a DataArray + # this is fragile and could cause issues, not sure what subsetting on time_bound does return ds[[kwargs["variable"], "time_bound"]] - ## not sure what the chunking kwarg is doing here either + # not sure what the chunking kwarg is doing here either dsets = cat_subset.to_dataset_dict( - xarray_open_kwargs={"chunks": {"time": -1}}, preprocess=preprocess + xarray_open_kwargs={"chunks": {"time": -1}}, + preprocess=preprocess, ) else: diff --git a/cupid/run.py b/cupid/run.py index 536ccc5f..d934cc78 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -1,5 +1,4 @@ #!/usr/bin/env python - """ Main script for running all notebooks and scripts specified in the configuration file. @@ -21,19 +20,24 @@ -config_path Path to the YAML configuration file containing specifications for notebooks (default: config.yml) -h, --help Show this message and exit. 
""" +from __future__ import annotations import os import warnings + import click import intake import ploomber -import cupid.util + import cupid.timeseries +import cupid.util CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) # fmt: off # pylint: disable=line-too-long + + @click.command(context_settings=CONTEXT_SETTINGS) @click.option("--serial", "-s", is_flag=True, help="Do not use LocalCluster objects") @click.option("--time-series", "-ts", is_flag=True, help="Run time series generation scripts prior to diagnostics") @@ -135,7 +139,7 @@ def run( output_dir = run_dir + "/computed_notebooks/" + control["data_sources"]["sname"] temp_data_path = run_dir + "/temp_data" nb_path_root = os.path.realpath( - os.path.expanduser(control["data_sources"]["nb_path_root"]) + os.path.expanduser(control["data_sources"]["nb_path_root"]), ) ##################################################################### @@ -147,7 +151,7 @@ def run( if "path_to_cat_json" in control["data_sources"]: full_cat_path = os.path.realpath( - os.path.expanduser(control["data_sources"]["path_to_cat_json"]) + os.path.expanduser(control["data_sources"]["path_to_cat_json"]), ) full_cat = intake.open_esm_datastore(full_cat_path) @@ -159,7 +163,7 @@ def run( # This pulls out the name of the catalog from the path cat_subset_name = full_cat_path.split("/")[-1].split(".")[0] + "_subset" cat_subset.serialize( - directory=temp_data_path, name=cat_subset_name, catalog_type="file" + directory=temp_data_path, name=cat_subset_name, catalog_type="file", ) cat_path = temp_data_path + "/" + cat_subset_name + ".json" else: @@ -191,7 +195,7 @@ def run( all_nbs[nb]["output_dir"] = output_dir + "/" + comp_name elif comp_bool and not all: warnings.warn( - f"No notebooks for {comp_name} component specified in config file." + f"No notebooks for {comp_name} component specified in config file.", ) # Checking for existence of environments @@ -200,9 +204,9 @@ def run( if not control["env_check"][info["kernel_name"]]: bad_env = info["kernel_name"] warnings.warn( - f"Environment {bad_env} specified for {nb}.ipynb could not be found;"+ - f" {nb}.ipynb will not be run."+ - f"See README.md for environment installation instructions." + f"Environment {bad_env} specified for {nb}.ipynb could not be found;" + + f" {nb}.ipynb will not be run." + + "See README.md for environment installation instructions.", ) all_nbs.pop(nb) @@ -234,7 +238,7 @@ def run( all_scripts[script]["nb_path_root"] = nb_path_root + "/" + comp_name elif comp_bool and not all: warnings.warn( - f"No scripts for {comp_name} component specified in config file." + f"No scripts for {comp_name} component specified in config file.", ) # Checking for existence of environments @@ -243,8 +247,8 @@ def run( if not control["env_check"][info["kernel_name"]]: bad_env = info["kernel_name"] warnings.warn( - f"Environment {bad_env} specified for {script}.py could not be found;"+ - f"{script}.py will not be run." + f"Environment {bad_env} specified for {script}.py could not be found;" + + f"{script}.py will not be run.", ) all_scripts.pop(script) diff --git a/cupid/timeseries.py b/cupid/timeseries.py index 6315c16a..c1748eff 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -1,16 +1,17 @@ """ Timeseries generation tool adapted from ADF for general CUPiD use. 
""" - # ++++++++++++++++++++++++++++++ # Import standard python modules # ++++++++++++++++++++++++++++++ +from __future__ import annotations import glob import multiprocessing as mp import os import subprocess from pathlib import Path + import xarray as xr @@ -124,7 +125,7 @@ def create_time_series( for year in range(start_year, end_year + 1): # Add files to main file list: for fname in starting_location.glob( - f"*{hist_str}.*{str(year).zfill(4)}*.nc" + f"*{hist_str}.*{str(year).zfill(4)}*.nc", ): files_list.append(fname) # End for @@ -135,7 +136,9 @@ def create_time_series( # Open an xarray dataset from the first model history file: hist_file_ds = xr.open_dataset( - hist_files[0], decode_cf=False, decode_times=False + hist_files[0], + decode_cf=False, + decode_times=False, ) # Get a list of data variables in the 1st hist file: @@ -221,13 +224,13 @@ def create_time_series( # create copy of var list that can be modified for derivable variables if diag_var_list == ["process_all"]: print("generating time series for all variables") - # TODO: this doesn't seem to be working for ocn... + # TODO: this does not seem to be working for ocn... diag_var_list = hist_file_var_list for var in diag_var_list: if var not in hist_file_var_list: if component == "ocn": print( - "ocean vars seem to not be present in all files and thus cause errors" + "ocean vars seem to not be present in all files and thus cause errors", ) continue if ( @@ -325,7 +328,8 @@ def create_time_series( if vars_to_derive: if component == "atm": derive_cam_variables( - vars_to_derive=vars_to_derive, ts_dir=ts_dir[case_idx] + vars_to_derive=vars_to_derive, + ts_dir=ts_dir[case_idx], ) if serial: @@ -357,7 +361,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): # PRECT can be found by simply adding PRECL and PRECC # grab file names for the PRECL and PRECC files from the case ts directory if glob.glob(os.path.join(ts_dir, "*PRECC*")) and glob.glob( - os.path.join(ts_dir, "*PRECL*") + os.path.join(ts_dir, "*PRECL*"), ): constit_files = sorted(glob.glob(os.path.join(ts_dir, "*PREC*"))) else: @@ -374,7 +378,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): else: print( f"[{__name__}] Warning: PRECT file was found and overwrite is False" - + "Will use existing file." + + "Will use existing file.", ) continue # append PRECC to the file containing PRECL @@ -385,7 +389,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): # RESTOM = FSNT-FLNT # Have to be more precise than with PRECT because FSNTOA, FSTNC, etc are valid variables if glob.glob(os.path.join(ts_dir, "*.FSNT.*")) and glob.glob( - os.path.join(ts_dir, "*.FLNT.*") + os.path.join(ts_dir, "*.FLNT.*"), ): input_files = [ sorted(glob.glob(os.path.join(ts_dir, f"*.{v}.*"))) @@ -408,12 +412,12 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): else: print( f"[{__name__}] Warning: RESTOM file was found and overwrite is False." - + "Will use existing file." 
+ + "Will use existing file.", ) continue # append FSNT to the file containing FLNT os.system(f"ncks -A -v FLNT {constit_files[0]} {constit_files[1]}") # create new file with the difference of FLNT and FSNT os.system( - f"ncap2 -s 'RESTOM=(FSNT-FLNT)' {constit_files[1]} {derived_file}" + f"ncap2 -s 'RESTOM=(FSNT-FLNT)' {constit_files[1]} {derived_file}", ) diff --git a/cupid/util.py b/cupid/util.py index c8d89915..2d5b9cc6 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -13,17 +13,19 @@ - ManageCondaKernel: Class for managing conda kernels. - MarkdownJinjaEngine: Class for using the Jinja Engine to run notebooks. """ +from __future__ import annotations import os import sys -from pathlib import Path import warnings +from pathlib import Path + import jupyter_client import papermill as pm import ploomber -from papermill.engines import NBClientEngine -from jinja2 import Template import yaml +from jinja2 import Template +from papermill.engines import NBClientEngine class MarkdownJinjaEngine(NBClientEngine): @@ -45,7 +47,7 @@ def execute_managed_notebook(cls, nb_man, kernel_name, **kwargs): def get_control_dict(config_path): """Get control dictionary from configuration file""" try: - with open(config_path, "r") as fid: + with open(config_path) as fid: control = yaml.safe_load(fid) except FileNotFoundError: print(f"ERROR: {config_path} not found") @@ -63,7 +65,8 @@ def get_control_dict(config_path): if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( - f"No conda environment specified for {nb}.ipynb and no default kernel set, will use cupid-analysis environment." + f"No conda environment specified for {nb}.ipynb and" + + " no default kernel set, will use cupid-analysis environment.", ) if info["kernel_name"] not in control["env_check"]: control["env_check"][info["kernel_name"]] = ( @@ -78,7 +81,8 @@ def get_control_dict(config_path): if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( - f"No environment specified for {script}.py and no default kernel set, will use cupid-analysis environment." + f"No environment specified for {script}.py and no default" + + " kernel set, will use cupid-analysis environment.", ) if info["kernel_name"] not in control["env_check"]: control["env_check"][info["kernel_name"]] = ( @@ -118,7 +122,7 @@ def setup_book(config_path): path_to_here = os.path.dirname(os.path.realpath(__file__)) - with open(f"{path_to_here}/_jupyter-book-config-defaults.yml", "r") as fid: + with open(f"{path_to_here}/_jupyter-book-config-defaults.yml") as fid: config = yaml.safe_load(fid) # update defaults @@ -132,7 +136,14 @@ def setup_book(config_path): def create_ploomber_nb_task( - nb, info, cat_path, nb_path_root, output_dir, global_params, dag, dependency=None + nb, + info, + cat_path, + nb_path_root, + output_dir, + global_params, + dag, + dependency=None, ): """ Creates a ploomber task for running a notebook, including necessary parameters. 
@@ -153,7 +164,7 @@ def create_ploomber_nb_task( parameter_groups = info["parameter_groups"] - ### passing in subset kwargs if they're provided + # passing in subset kwargs if they're provided if "subset" in info: subset_kwargs = info["subset"] else: @@ -169,7 +180,7 @@ def create_ploomber_nb_task( output_path = f"{output_dir}/{output_name}" - ### all of these things should be optional + # all of these things should be optional parms_in = dict(**default_params) parms_in.update(**global_params) parms_in.update(dict(**parms)) @@ -206,7 +217,13 @@ def create_ploomber_nb_task( def create_ploomber_script_task( - script, info, cat_path, nb_path_root, global_params, dag, dependency=None + script, + info, + cat_path, + nb_path_root, + global_params, + dag, + dependency=None, ): """ Creates a Ploomber task for running a script, including necessary parameters. @@ -229,7 +246,7 @@ def create_ploomber_script_task( parameter_groups = info["parameter_groups"] - ### passing in subset kwargs if they're provided + # passing in subset kwargs if they're provided if "subset" in info: subset_kwargs = info["subset"] else: @@ -245,7 +262,7 @@ def create_ploomber_script_task( # output_path = f"{output_dir}/{output_name}" - ### all of these things should be optional + # all of these things should be optional parms_in = dict(**default_params) parms_in.update(**global_params) parms_in.update(dict(**parms)) diff --git a/docs/NCARtips.rst b/docs/NCARtips.rst index cef2ff0c..80201253 100644 --- a/docs/NCARtips.rst +++ b/docs/NCARtips.rst @@ -1,2 +1,2 @@ .. include:: NCAR_tips.md - :parser: myst \ No newline at end of file + :parser: myst diff --git a/docs/addingnotebookstocollection.md b/docs/addingnotebookstocollection.md index bf54ddd7..c4bee823 100644 --- a/docs/addingnotebookstocollection.md +++ b/docs/addingnotebookstocollection.md @@ -23,7 +23,7 @@ Generally, a good fit for a diagnostic notebook is one that reads in CESM output none: param_specific_to_this_nb: some_value another_param: another_value - + If you just want the notebook run once on one set of parameters, keep the `parameter_groups: none:` notation as above. If you want the notebook executed multiple times with different parameter sets, the notation would look like this: your_new_nb_name: @@ -39,5 +39,5 @@ Generally, a good fit for a diagnostic notebook is one that reads in CESM output 6. If you'd like your new notebook included in the final Jupyter Book, add it to the Jupyter Book table of contents (`book_toc`). See [Jupyter Book's documentation](https://jupyterbook.org/en/stable/structure/toc.html) for different things you can do with this. 7. Update your parameters. Parameters that are specific to just this notebook should go under `parameter_groups` in the notebook's entry under `compute_notebooks`. Global parameters that you want passed in to every notebook in the collection should go under `global_params`. When `CUPiD` executes your notebook, all of these parameters will get put in a new cell below the cell tagged `parameters` that you added in step 3. This means they will supercede the values of the parameters that you put in the cell above---the names, notation, etc. should match to make sure your notebook is able to find the variables it needs. - + 8. All set! Your collection can now be run and built with `cupid-run` and `cupid-build` as usual. diff --git a/docs/conf.py b/docs/conf.py index 39514e05..bd59efa2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -3,16 +3,16 @@ # This file only contains a selection of the most common options. 
For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html - # -- Path setup -------------------------------------------------------------- - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -import os -import sys +from __future__ import annotations + import datetime +import os import re +import sys sys.path.insert(0, os.path.abspath("../..")) @@ -26,7 +26,7 @@ os.system(f"cp ../{file} ./") # Remove any images from the first line of the file - with open(file, "r") as f: + with open(file) as f: file1 = f.readline() file1 = re.sub(" ", "", file1) file_rest = f.read() @@ -39,7 +39,7 @@ project = "CUPiD" current_year = datetime.datetime.now().year -copyright = "{}, University Corporation for Atmospheric Research".format(current_year) +copyright = f"{current_year}, University Corporation for Atmospheric Research" author = "NSF NCAR" @@ -50,7 +50,7 @@ # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# extensions coming with Sphinx (named "sphinx.ext.*") or your custom # ones. extensions = [ @@ -85,7 +85,7 @@ # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] +# source_suffix = [".rst", ".md"] source_suffix = { ".rst": "restructuredtext", ".ipynb": "myst-nb", @@ -106,7 +106,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = dict( - # analytics_id='' this is configured in rtfd.io + # analytics_id="" this is configured in rtfd.io # canonical_url="", repository_url="https://github.com/NCAR/CUPiD", repository_branch="main", @@ -115,7 +115,7 @@ use_repository_button=True, use_issues_button=True, home_page_in_toc=True, - extra_footer="The National Center for Atmospheric Research is sponsored by the National Science Foundation. Any opinions, findings and conclusions or recommendations expressed in this material do not necessarily reflect the views of the National Science Foundation.", + extra_footer="The National Center for Atmospheric Research is sponsored by the National Science Foundation. Any opinions, findings and conclusions or recommendations expressed in this material do not necessarily reflect the views of the National Science Foundation.", # noqa ) # Add any paths that contain custom static files (such as style sheets) here, diff --git a/docs/index.rst b/docs/index.rst index 8a1fef1a..0f8629bd 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,4 +10,3 @@ CUPiD Documentation .. 
include:: README.md :parser: myst - diff --git a/environments/docs.yml b/environments/docs.yml index ff4c40bf..381c085a 100644 --- a/environments/docs.yml +++ b/environments/docs.yml @@ -9,4 +9,4 @@ dependencies: - sphinx-book-theme - myst-nb - sphinx-design - - nbsphinx \ No newline at end of file + - nbsphinx diff --git a/examples/coupled_model/config.yml b/examples/coupled_model/config.yml index 121217e8..4fdabc08 100644 --- a/examples/coupled_model/config.yml +++ b/examples/coupled_model/config.yml @@ -1,4 +1,3 @@ - ################## SETUP ################## ################ @@ -33,7 +32,7 @@ computation_config: ### notebook in NOTEBOOK CONFIG default_kernel_name: cupid-analysis - + ############# NOTEBOOK CONFIG ############# ############################ @@ -54,8 +53,8 @@ timeseries: case_name: 'b.e23_alpha16b.BLT1850.ne30_t232.054' atm: - vars: ['ACTNI', 'ACTNL', 'ACTREI','ACTREL','AODDUST'] - derive_vars: [] # {'PRECT':['PRECL','PRECC'], 'RESTOM':['FLNT','FSNT']} + vars: ['ACTNI', 'ACTNL', 'ACTREI', 'ACTREL', 'AODDUST'] + derive_vars: [] # {'PRECT':['PRECL', 'PRECC'], 'RESTOM':['FLNT', 'FSNT']} hist_str: 'h0' start_years: [2] end_years: [102] @@ -96,12 +95,12 @@ timeseries: compute_notebooks: # This is where all the notebooks you want run and their - ### parameters are specified. Several examples of different - ### types of notebooks are provided. + # parameters are specified. Several examples of different + # types of notebooks are provided. # The first key (here simple_no_params_nb) is the name of the - ### notebook from nb_path_root, minus the .ipynb - + # notebook from nb_path_root, minus the .ipynb + infrastructure: index: parameter_groups: @@ -128,7 +127,7 @@ compute_notebooks: native: 'mom6.h.native.????-??.nc' static: 'mom6.h.static.nc' oce_cat: /glade/u/home/gmarques/libs/oce-catalogs/reference-datasets.yml - + lnd: land_comparison: parameter_groups: @@ -161,7 +160,7 @@ compute_notebooks: book_toc: # See https://jupyterbook.org/en/stable/structure/configure.html for - ## complete documentation of Jupyter book construction options + # complete documentation of Jupyter book construction options format: jb-book @@ -171,12 +170,12 @@ book_toc: parts: # Parts group notebooks into different sections in the Jupyter book - ### table of contents, so you can organize different parts of your project. + # table of contents, so you can organize different parts of your project. 
- caption: Atmosphere # Each chapter is the name of one of the notebooks that you executed - ### in compute_notebooks above, also without .ipynb + # in compute_notebooks above, also without .ipynb chapters: - file: atm/adf_quick_run @@ -201,10 +200,3 @@ book_config_keys: # Other keys can be added here, see https://jupyterbook.org/en/stable/customize/config.html ### for many more options - - - - - - - diff --git a/examples/nblibrary/atm/adf_quick_run.ipynb b/examples/nblibrary/atm/adf_quick_run.ipynb index 93c28d53..115369a0 100644 --- a/examples/nblibrary/atm/adf_quick_run.ipynb +++ b/examples/nblibrary/atm/adf_quick_run.ipynb @@ -52,7 +52,7 @@ "### default parameters\n", "# adf_path = \"../../externals/ADF\"\n", "# config_path = \".\"\n", - "# config_fil_str = \"config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae_numcin3.001_vs_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml\"\n" + "# config_fil_str = \"config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae_numcin3.001_vs_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml\"" ] }, { @@ -86,14 +86,14 @@ "outputs": [], "source": [ "# Determine ADF directory path\n", - "# If it is in your cwd, set adf_path = local_path, \n", + "# If it is in your cwd, set adf_path = local_path,\n", "# otherwise set adf_path appropriately\n", "\n", - "local_path = os.path.abspath('')\n", + "local_path = os.path.abspath(\"\")\n", "\n", "# Set up the ADF for your location/user name\n", - "#user = \"richling\" \n", - "#adf_path = f\"/glade/work/{user}/ADF/\"\n", + "# user = \"richling\"\n", + "# adf_path = f\"/glade/work/{user}/ADF/\"\n", "\n", "print(f\"current working directory = {local_path}\")\n", "print(f\"ADF path = {adf_path}\")" @@ -106,11 +106,11 @@ "metadata": {}, "outputs": [], "source": [ - "#set path to ADF lib\n", - "lib_path = os.path.join(adf_path,\"lib\")\n", + "# set path to ADF lib\n", + "lib_path = os.path.join(adf_path, \"lib\")\n", "print(f\"The lib scripts live here, right? {lib_path}\")\n", "\n", - "#Add paths to python path:\n", + "# Add paths to python path:\n", "sys.path.append(lib_path)" ] }, @@ -121,11 +121,11 @@ "metadata": {}, "outputs": [], "source": [ - "#set path to ADF plotting scripts directory\n", - "plotting_scripts_path = os.path.join(adf_path,\"scripts\",\"plotting\")\n", + "# set path to ADF plotting scripts directory\n", + "plotting_scripts_path = os.path.join(adf_path, \"scripts\", \"plotting\")\n", "print(f\"The plotting scripts live here, right? 
{plotting_scripts_path}\")\n", "\n", - "#Add paths to python path:\n", + "# Add paths to python path:\n", "sys.path.append(plotting_scripts_path)" ] }, @@ -145,7 +145,7 @@ "metadata": {}, "outputs": [], "source": [ - "config_file=os.path.join(config_path,config_fil_str)\n" + "config_file = os.path.join(config_path, config_fil_str)" ] }, { @@ -155,13 +155,13 @@ "metadata": {}, "outputs": [], "source": [ - "#import ADF diagnostics object\n", + "# import ADF diagnostics object\n", "from adf_diag import AdfDiag\n", "\n", "# If this fails, check your paths output in the cells above,\n", "# and that you are running the NPL (conda) Kernel\n", "# You can see all the paths being examined by un-commenting the following:\n", - "#sys.path" + "# sys.path" ] }, { @@ -171,7 +171,7 @@ "metadata": {}, "outputs": [], "source": [ - "#Initialize ADF object\n", + "# Initialize ADF object\n", "adf = AdfDiag(config_file)\n", "adf" ] @@ -205,10 +205,10 @@ }, "outputs": [], "source": [ - "#Create model time series.\n", + "# Create model time series.\n", "adf.create_time_series()\n", "\n", - "#Create model baseline time series (if needed):\n", + "# Create model baseline time series (if needed):\n", "if not adf.compare_obs:\n", " adf.create_time_series(baseline=True)" ] @@ -232,7 +232,7 @@ }, "outputs": [], "source": [ - "#Create model climatology (climo) files.\n", + "# Create model climatology (climo) files.\n", "adf.create_climo()" ] }, @@ -253,9 +253,9 @@ }, "outputs": [], "source": [ - "#Regrid model climatology files to match either\n", - "#observations or CAM baseline climatologies.\n", - "#This call uses the \"regridding_scripts\" specified in the config file:\n", + "# Regrid model climatology files to match either\n", + "# observations or CAM baseline climatologies.\n", + "# This call uses the \"regridding_scripts\" specified in the config file:\n", "adf.regrid_climo()" ] }, @@ -396,7 +396,7 @@ "source": [ "basic_info_dict = adf.read_config_var(\"diag_basic_info\")\n", "\n", - "for key,val in basic_info_dict.items():\n", + "for key, val in basic_info_dict.items():\n", " print(f\"{key}: {val}\")" ] }, @@ -417,7 +417,7 @@ "source": [ "test_dict = adf.read_config_var(\"diag_cam_climo\")\n", "\n", - "for key,val in test_dict.items():\n", + "for key, val in test_dict.items():\n", " print(f\"{key}: {val}\")" ] }, @@ -438,7 +438,7 @@ "source": [ "baseline_dict = adf.read_config_var(\"diag_cam_baseline_climo\")\n", "\n", - "for key,val in baseline_dict.items():\n", + "for key, val in baseline_dict.items():\n", " print(f\"{key}: {val}\")" ] }, @@ -469,8 +469,8 @@ "metadata": {}, "outputs": [], "source": [ - "#List of case names (list by default)\n", - "case_names = adf.get_cam_info(\"cam_case_name\",required=True)\n", + "# List of case names (list by default)\n", + "case_names = adf.get_cam_info(\"cam_case_name\", required=True)\n", "print(case_names)\n", "\n", "base_name = adf.get_baseline_info(\"cam_case_name\")\n", @@ -494,9 +494,9 @@ "metadata": {}, "outputs": [], "source": [ - "case_climo_loc = adf.get_cam_info('cam_climo_loc', required=True)\n", + "case_climo_loc = adf.get_cam_info(\"cam_climo_loc\", required=True)\n", "base_climo_loc = adf.get_baseline_info(\"cam_climo_loc\")\n", - "case_climo_loc,base_climo_loc" + "case_climo_loc, base_climo_loc" ] }, { @@ -573,14 +573,14 @@ "metadata": {}, "outputs": [], "source": [ - "case_names = adf.get_cam_info('cam_case_name', required=True)\n", + "case_names = adf.get_cam_info(\"cam_case_name\", required=True)\n", "case_names_len = len(case_names)\n", - "data_name = 
adf.get_baseline_info('cam_case_name', required=False)\n", + "data_name = adf.get_baseline_info(\"cam_case_name\", required=False)\n", "\n", "case_ts_locs = adf.get_cam_info(\"cam_ts_loc\", required=True)\n", "data_ts_loc = adf.get_baseline_info(\"cam_ts_loc\", required=False)\n", "\n", - "res = adf.variable_defaults # dict of variable-specific plot preferences\n", + "res = adf.variable_defaults # dict of variable-specific plot preferences\n", "# or an empty dictionary if use_defaults was not specified in YAML.\n", "\n", "start_year = adf.climo_yrs[\"syears\"]\n", @@ -607,12 +607,14 @@ " print(\"Input file list is empty.\")\n", " return None\n", " elif len(fils) > 1:\n", - " return xr.open_mfdataset(fils, combine='by_coords')\n", + " return xr.open_mfdataset(fils, combine=\"by_coords\")\n", " else:\n", " sfil = str(fils[0])\n", " return xr.open_dataset(sfil)\n", - " #End if\n", - "#End def" + " # End if\n", + "\n", + "\n", + "# End def" ] }, { @@ -622,42 +624,53 @@ "metadata": {}, "outputs": [], "source": [ - "def _data_calcs(ts_loc,var,subset=None):\n", + "def _data_calcs(ts_loc, var, subset=None):\n", " \"\"\"\n", " args\n", " ----\n", " - ts_loc: Path\n", " path to time series file\n", - " \n", + "\n", " - var: str\n", " name of variable\n", - " \n", - " - subset (optional): dict \n", + "\n", + " - subset (optional): dict\n", " lat/lon extents (south, north, east, west)\n", " \"\"\"\n", " fils = sorted(list(Path(ts_loc).glob(f\"*{var}*.nc\")))\n", "\n", " ts_ds = _load_dataset(fils)\n", - " \n", - " time = ts_ds['time']\n", - " time = xr.DataArray(ts_ds['time_bnds'].load().mean(dim='nbnd').values, dims=time.dims, attrs=time.attrs)\n", - " ts_ds['time'] = time\n", + "\n", + " time = ts_ds[\"time\"]\n", + " time = xr.DataArray(\n", + " ts_ds[\"time_bnds\"].load().mean(dim=\"nbnd\").values,\n", + " dims=time.dims,\n", + " attrs=time.attrs,\n", + " )\n", + " ts_ds[\"time\"] = time\n", " ts_ds.assign_coords(time=time)\n", " ts_ds = xr.decode_cf(ts_ds)\n", - " \n", + "\n", " if subset != None:\n", - " ts_ds = ts_ds.sel(lat=slice(subset[\"s\"],subset[\"n\"]), lon=slice(subset[\"w\"],subset[\"e\"])) \n", - " \n", + " ts_ds = ts_ds.sel(\n", + " lat=slice(subset[\"s\"], subset[\"n\"]), lon=slice(subset[\"w\"], subset[\"e\"])\n", + " )\n", + "\n", " data = ts_ds[var].squeeze()\n", " unit = data.units\n", - " \n", + "\n", " # global weighting\n", " w = np.cos(np.radians(data.lat))\n", - " avg = data.weighted(w).mean(dim=(\"lat\",\"lon\"))\n", - " \n", - " yrs = np.unique([str(val.item().timetuple().tm_year).zfill(4) for _,val in enumerate(ts_ds[\"time\"])])\n", + " avg = data.weighted(w).mean(dim=(\"lat\", \"lon\"))\n", "\n", - " return avg,yrs,unit" + " yrs = np.unique(\n", + " [\n", + " str(val.item().timetuple().tm_year).zfill(4)\n", + " for _, val in enumerate(ts_ds[\"time\"])\n", + " ]\n", + " )\n", + "\n", + " return avg, yrs, unit" ] }, { @@ -667,7 +680,7 @@ "metadata": {}, "outputs": [], "source": [ - "def ts_plot(ax, name, vals, yrs, unit, color_dict,linewidth=None,zorder=1):\n", + "def ts_plot(ax, name, vals, yrs, unit, color_dict, linewidth=None, zorder=1):\n", " \"\"\"\n", " args\n", " ----\n", @@ -675,15 +688,23 @@ " color and marker style for variable\n", " \"\"\"\n", "\n", - " ax.plot(yrs, vals, color_dict[\"marker\"], c=color_dict[\"color\"],label=name,linewidth=linewidth,zorder=zorder)\n", + " ax.plot(\n", + " yrs,\n", + " vals,\n", + " color_dict[\"marker\"],\n", + " c=color_dict[\"color\"],\n", + " label=name,\n", + " linewidth=linewidth,\n", + " zorder=zorder,\n", + " )\n", 
"\n", - " ax.set_xlabel(\"Years\",fontsize=15,labelpad=20)\n", - " ax.set_ylabel(unit,fontsize=15,labelpad=20) \n", + " ax.set_xlabel(\"Years\", fontsize=15, labelpad=20)\n", + " ax.set_ylabel(unit, fontsize=15, labelpad=20)\n", "\n", " # For the minor ticks, use no labels; default NullFormatter.\n", - " ax.tick_params(which='major', length=7)\n", - " ax.tick_params(which='minor', length=5)\n", - " \n", + " ax.tick_params(which=\"major\", length=7)\n", + " ax.tick_params(which=\"minor\", length=5)\n", + "\n", " return ax" ] }, @@ -695,58 +716,69 @@ "outputs": [], "source": [ "def plot_var_details(ax, var, vals_cases, vals_base):\n", - " \n", " mins = []\n", " maxs = []\n", - " for i,val in enumerate(vals_cases):\n", - "\n", + " for i, val in enumerate(vals_cases):\n", " mins.append(np.nanmin(vals_cases[i]))\n", " maxs.append(np.nanmax(vals_cases[i]))\n", "\n", " mins.append(np.nanmin(vals_base))\n", " maxs.append(np.nanmax(vals_base))\n", "\n", - " if var == \"SST\": \n", - " ax.set_ylabel(\"K\",fontsize=20,labelpad=12)\n", + " if var == \"SST\":\n", + " ax.set_ylabel(\"K\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.5\n", " ax.yaxis.set_major_locator(MultipleLocator(1))\n", - " ax.set_title(f\"Time Series Global: {var} - ANN\",loc=\"left\",fontsize=22)\n", - " \n", + " ax.set_title(f\"Time Series Global: {var} - ANN\", loc=\"left\", fontsize=22)\n", + "\n", " if var == \"TS\":\n", - " ax.set_ylabel(\"K\",fontsize=20,labelpad=12)\n", + " ax.set_ylabel(\"K\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.5\n", " ax.yaxis.set_minor_locator(MultipleLocator(0.5))\n", - " ax.set_title(f\"Time Series Global: {var} - ANN\",loc=\"left\",fontsize=22)\n", + " ax.set_title(f\"Time Series Global: {var} - ANN\", loc=\"left\", fontsize=22)\n", "\n", " if var == \"ICEFRAC\":\n", - " ax.set_ylabel(\"frac\",fontsize=20,labelpad=12)\n", + " ax.set_ylabel(\"frac\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.1\n", - " ax.set_ylim(np.floor(min(mins)),np.ceil(max(maxs)))\n", - " ax.set_title(f\"Time Series LabSea: {var} - ANN\",loc=\"left\",fontsize=22)\n", + " ax.set_ylim(np.floor(min(mins)), np.ceil(max(maxs)))\n", + " ax.set_title(f\"Time Series LabSea: {var} - ANN\", loc=\"left\", fontsize=22)\n", "\n", " if var == \"RESTOM\":\n", - " ax.set_ylabel(\"W/m2\",fontsize=20,labelpad=12)\n", + " ax.set_ylabel(\"W/m2\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.5\n", " ax.yaxis.set_minor_locator(MultipleLocator(0.1))\n", - " ax.set_title(f\"Time Series Global: {var} - ANN\",loc=\"left\",fontsize=22)\n", - " \n", + " ax.set_title(f\"Time Series Global: {var} - ANN\", loc=\"left\", fontsize=22)\n", + "\n", " # Set label to show if RESTOM is 1 or 5-yr avg\n", - " line_1yr = Line2D([], [], label='1-yr avg', color='k', linewidth=1,marker='*',) \n", - " line_5yr = Line2D([], [], label='5-yr avg', color='k', linewidth=1,)\n", - " ax.legend(handles=[line_1yr,line_5yr], bbox_to_anchor=(0.99, 0.99))\n", - " \n", + " line_1yr = Line2D(\n", + " [],\n", + " [],\n", + " label=\"1-yr avg\",\n", + " color=\"k\",\n", + " linewidth=1,\n", + " marker=\"*\",\n", + " )\n", + " line_5yr = Line2D(\n", + " [],\n", + " [],\n", + " label=\"5-yr avg\",\n", + " color=\"k\",\n", + " linewidth=1,\n", + " )\n", + " ax.legend(handles=[line_1yr, line_5yr], bbox_to_anchor=(0.99, 0.99))\n", + "\n", " # Add extra space on the y-axis, except for ICEFRAC\n", " if var != \"ICEFRAC\":\n", - " ax.set_ylim(np.floor(min(mins)),np.ceil(max(maxs))+tick_spacing)\n", - " \n", + " ax.set_ylim(np.floor(min(mins)), np.ceil(max(maxs)) + 
tick_spacing)\n", + "\n", " ax.yaxis.set_major_locator(MultipleLocator(tick_spacing))\n", - " \n", - " ax.tick_params(axis='y', which='major', labelsize=16)\n", - " ax.tick_params(axis='y', which='minor', labelsize=16)\n", - " \n", - " ax.tick_params(axis='x', which='major', labelsize=14)\n", - " ax.tick_params(axis='x', which='minor', labelsize=14)\n", - " \n", + "\n", + " ax.tick_params(axis=\"y\", which=\"major\", labelsize=16)\n", + " ax.tick_params(axis=\"y\", which=\"minor\", labelsize=16)\n", + "\n", + " ax.tick_params(axis=\"x\", which=\"major\", labelsize=14)\n", + " ax.tick_params(axis=\"x\", which=\"minor\", labelsize=14)\n", + "\n", " return ax" ] }, @@ -773,7 +805,7 @@ "metadata": {}, "outputs": [], "source": [ - "ts_var_list = [\"RESTOM\",\"TS\",\"ICEFRAC\"]" + "ts_var_list = [\"RESTOM\", \"TS\", \"ICEFRAC\"]" ] }, { @@ -791,19 +823,19 @@ "from matplotlib.ticker import MultipleLocator\n", "from matplotlib.lines import Line2D\n", "\n", - "fig = plt.figure(figsize=(30,15))\n", + "fig = plt.figure(figsize=(30, 15))\n", "\n", "# Change the layout/number of subplots based off number of variables desired\n", "rows = 2\n", "cols = 3\n", - "gs = fig.add_gridspec(rows, cols, hspace=.3, wspace=.2)\n", + "gs = fig.add_gridspec(rows, cols, hspace=0.3, wspace=0.2)\n", "\n", "# Rough subset for Lab Sea\n", - "w = -63.5+360\n", - "e = -47.5+360\n", + "w = -63.5 + 360\n", + "e = -47.5 + 360\n", "s = 53.5\n", "n = 65.5\n", - "subset = {\"s\":s,\"n\":n,\"e\":e,\"w\":w}\n", + "subset = {\"s\": s, \"n\": n, \"e\": e, \"w\": w}\n", "\n", "# Add more colors as needed for number of test cases\n", "# ** Baseline is already added as green dashed line in plotting function **\n", @@ -811,13 +843,12 @@ "colors = [\"k\", \"aqua\", \"orange\", \"b\", \"magenta\", \"goldenrod\", \"slategrey\", \"rosybrown\"]\n", "\n", "# Setup plotting\n", - "#---------------\n", + "# ---------------\n", "\n", "# Loop over variables:\n", - "for i,var in enumerate(ts_var_list):\n", - " \n", - " print(\"Plotting variable:\",var)\n", - " \n", + "for i, var in enumerate(ts_var_list):\n", + " print(\"Plotting variable:\", var)\n", + "\n", " if var == \"RESTOM\":\n", " ax = plt.subplot(gs[0, 0])\n", " if var == \"TS\":\n", @@ -826,76 +857,79 @@ " ax = plt.subplot(gs[0, 2])\n", "\n", " # Grab baseline case:\n", - " #--------------------\n", + " # --------------------\n", "\n", - " if var == \"RESTOM\": \n", - " avg_base_FSNT,yrs_base,unit = _data_calcs(data_ts_loc,'FSNT')\n", - " avg_base_FLNT,_,_ = _data_calcs(data_ts_loc,\"FLNT\")\n", + " if var == \"RESTOM\":\n", + " avg_base_FSNT, yrs_base, unit = _data_calcs(data_ts_loc, \"FSNT\")\n", + " avg_base_FLNT, _, _ = _data_calcs(data_ts_loc, \"FLNT\")\n", " if len(yrs_base) < 5:\n", - " print(f\"Not a lot of climo years for {data_name}, only doing 1-yr avg for RESTOM...\")\n", + " print(\n", + " f\"Not a lot of climo years for {data_name}, only doing 1-yr avg for RESTOM...\"\n", + " )\n", " FSNT_base = avg_base_FSNT\n", " FLNT_base = avg_base_FLNT\n", " else:\n", - " FSNT_base = avg_base_FSNT.rolling(time=60,center=True).mean()\n", - " FLNT_base = avg_base_FLNT.rolling(time=60,center=True).mean()\n", + " FSNT_base = avg_base_FSNT.rolling(time=60, center=True).mean()\n", + " FLNT_base = avg_base_FLNT.rolling(time=60, center=True).mean()\n", "\n", " avg_base = FSNT_base - FLNT_base\n", - " \n", - " if (var == \"TS\" or var == \"SST\"):\n", - " avg_base,yrs_base,unit = _data_calcs(data_ts_loc,var)\n", - " \n", + "\n", + " if var == \"TS\" or var == \"SST\":\n", + " avg_base, yrs_base, 
unit = _data_calcs(data_ts_loc, var)\n", + "\n", " if var == \"ICEFRAC\":\n", - " avg_base,yrs_base,unit = _data_calcs(data_ts_loc,var,subset)\n", - " \n", + " avg_base, yrs_base, unit = _data_calcs(data_ts_loc, var, subset)\n", + "\n", " # Get int of years for plotting on x-axis\n", " yrs_base_int = yrs_base.astype(int)\n", "\n", " # Create yearly averages\n", " vals_base = [avg_base.sel(time=i).mean() for i in yrs_base]\n", - " \n", + "\n", " # Plot baseline data\n", - " color_dict = {\"color\":\"g\",\"marker\":\"--\"}\n", + " color_dict = {\"color\": \"g\", \"marker\": \"--\"}\n", " ax = ts_plot(ax, data_name, vals_base, yrs_base_int, unit, color_dict)\n", "\n", " # Loop over test cases:\n", - " #----------------------\n", + " # ----------------------\n", " # Create lists to hold all sets of years (for each case) and\n", " # sets of var data (for each case)\n", " vals_cases = []\n", " yrs_cases = []\n", " for case_idx, case_name in enumerate(case_names):\n", - "\n", " if var == \"RESTOM\":\n", - " avg_case_FSNT,yrs_case,unit = _data_calcs(case_ts_locs[case_idx],'FSNT')\n", - " avg_case_FLNT,_,_ = _data_calcs(case_ts_locs[case_idx],\"FLNT\")\n", + " avg_case_FSNT, yrs_case, unit = _data_calcs(case_ts_locs[case_idx], \"FSNT\")\n", + " avg_case_FLNT, _, _ = _data_calcs(case_ts_locs[case_idx], \"FLNT\")\n", " if len(yrs_case) < 5:\n", - " print(f\"Not a lot of climo years for {case_name}, only doing 1-yr avg for RESTOM...\")\n", + " print(\n", + " f\"Not a lot of climo years for {case_name}, only doing 1-yr avg for RESTOM...\"\n", + " )\n", " FSNT_case = avg_case_FSNT\n", " FLNT_case = avg_case_FLNT\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-*\"}\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-*\"}\n", " else:\n", - " FSNT_case = avg_case_FSNT.rolling(time=60,center=True).mean()\n", - " FLNT_case = avg_case_FLNT.rolling(time=60,center=True).mean()\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-\"}\n", + " FSNT_case = avg_case_FSNT.rolling(time=60, center=True).mean()\n", + " FLNT_case = avg_case_FLNT.rolling(time=60, center=True).mean()\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-\"}\n", "\n", " avg_case = FSNT_case - FLNT_case\n", "\n", " if var == \"TS\":\n", - " avg_case,yrs_case,unit = _data_calcs(case_ts_locs[case_idx],var)\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-\"}\n", - " \n", + " avg_case, yrs_case, unit = _data_calcs(case_ts_locs[case_idx], var)\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-\"}\n", + "\n", " if var == \"ICEFRAC\":\n", - " avg_case,yrs_case,unit = _data_calcs(case_ts_locs[case_idx],var,subset)\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-\"}\n", - " \n", + " avg_case, yrs_case, unit = _data_calcs(case_ts_locs[case_idx], var, subset)\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-\"}\n", + "\n", " # Get yearly averages for all available years\n", " vals_case = [avg_case.sel(time=i).mean() for i in yrs_case]\n", " vals_cases.append(vals_case)\n", - " \n", + "\n", " # Get int of years for plotting on x-axis\n", " yrs_case_int = yrs_case.astype(int)\n", " yrs_cases.append(yrs_case_int)\n", - " \n", + "\n", " # Add case to plot (ax)\n", " ax = ts_plot(ax, case_name, vals_case, yrs_case_int, unit, color_dict)\n", "\n", @@ -904,7 +938,7 @@ " # Get variable details\n", " ax = plot_var_details(ax, var, vals_cases, vals_base)\n", "\n", - " #Grab all unique years and find min/max years\n", + " # Grab all unique years and 
find min/max years\n", " uniq_yrs = sorted(x for v in yrs_cases for x in v)\n", " max_year = int(max(uniq_yrs))\n", " min_year = int(min(uniq_yrs))\n", @@ -918,22 +952,22 @@ " first_year = 0\n", "\n", " ax.set_xlim(first_year, last_year)\n", - " ax.set_xlabel(\"Years\",fontsize=15,labelpad=20)\n", + " ax.set_xlabel(\"Years\", fontsize=15, labelpad=20)\n", " # Set the x-axis plot limits\n", " # to guarantee data from all cases (including baseline) are on plot\n", - " ax.set_xlim(min_year, max_year+1)\n", + " ax.set_xlim(min_year, max_year + 1)\n", "\n", " # x-axis ticks and numbers\n", - " if max_year-min_year > 120:\n", + " if max_year - min_year > 120:\n", " ax.xaxis.set_major_locator(MultipleLocator(20))\n", " ax.xaxis.set_minor_locator(MultipleLocator(10))\n", - " if 10 <= max_year-min_year <= 120:\n", + " if 10 <= max_year - min_year <= 120:\n", " ax.xaxis.set_major_locator(MultipleLocator(5))\n", " ax.xaxis.set_minor_locator(MultipleLocator(1))\n", - " if 0 < max_year-min_year < 10:\n", + " if 0 < max_year - min_year < 10:\n", " ax.xaxis.set_major_locator(MultipleLocator(1))\n", " ax.xaxis.set_minor_locator(MultipleLocator(1))\n", - " \n", + "\n", " # End for (case loop)\n", "# End for (variables loop)\n", "\n", @@ -941,13 +975,17 @@ "# Gather labels based on case names and plotted line format (color, style, etc)\n", "lines_labels = [ax.get_legend_handles_labels() for ax in fig.axes]\n", "lines, labels = [sum(lol, []) for lol in zip(*lines_labels)]\n", - "fig.legend(lines[:case_names_len+1], labels[:case_names_len+1],\n", - " loc=\"center left\",fontsize=18,\n", - " bbox_to_anchor=(0.365, 0.4,.02,.05)) #bbox_to_anchor(x0, y0, width, height)\n", + "fig.legend(\n", + " lines[: case_names_len + 1],\n", + " labels[: case_names_len + 1],\n", + " loc=\"center left\",\n", + " fontsize=18,\n", + " bbox_to_anchor=(0.365, 0.4, 0.02, 0.05),\n", + ") # bbox_to_anchor(x0, y0, width, height)\n", "\n", "fig.show()\n", "\n", - "#plt.savefig(\"TimeSeries_ANN.png\", facecolor='w',bbox_inches=\"tight\")" + "# plt.savefig(\"TimeSeries_ANN.png\", facecolor='w',bbox_inches=\"tight\")" ] }, { diff --git a/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml b/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml index 880a4d04..bba9cb08 100644 --- a/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml +++ b/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml @@ -62,7 +62,7 @@ diag_basic_info: #Is this a model vs observations comparison? 
#If "false" or missing, then a model-model comparison is assumed: compare_obs: false - + hist_str: cam.h0 #Generate HTML website (assumed false if missing): @@ -111,9 +111,6 @@ diag_basic_info: num_procs: 8 redo_plot: false - - - @@ -139,7 +136,7 @@ diag_cam_climo: yrs: ${diag_cam_climo.start_year}-${diag_cam_climo.end_year} - #Location of CAM climatologies: + #Location of CAM climatologies: cam_climo_loc: ${climo_loc}${diag_cam_climo.cam_case_name}/atm/proc/${diag_cam_climo.yrs}/ #model year when time series files should start: @@ -203,7 +200,7 @@ diag_cam_baseline_climo: cam_hist_loc: /glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing/${diag_cam_baseline_climo.cam_case_name}/atm/hist/ yrs: ${diag_cam_baseline_climo.start_year}-${diag_cam_baseline_climo.end_year} - + #Location of baseline CAM climatologies: cam_climo_loc: /glade/campaign/cesm/development/cross-wg/diagnostic_framework/ADF-data/climos/${diag_cam_baseline_climo.cam_case_name}/${diag_cam_baseline_climo.yrs}/ @@ -245,7 +242,7 @@ diag_cam_baseline_climo: overwrite_tem: false - case_nickname: ${diag_cam_baseline_climo.cam_case_name} + case_nickname: ${diag_cam_baseline_climo.cam_case_name} #This fourth set of variables provides settings for calling the Climate Variability @@ -365,10 +362,10 @@ diag_var_list: - ICEFRAC - OCNFRAC - LANDFRAC - + derived_var_list: - - RESTOM - + - RESTOM + # diff --git a/examples/nblibrary/ice/cice_vars.yml b/examples/nblibrary/ice/cice_vars.yml index 00e6d61f..5c5de2da 100644 --- a/examples/nblibrary/ice/cice_vars.yml +++ b/examples/nblibrary/ice/cice_vars.yml @@ -3,22 +3,22 @@ aice: - levels: [0.05,0.10,0.15,0.20,0.30,0.40,0.50,0.60,0.70,0.80,0.85,0.90,0.95,0.99] - title: "Sea Ice Concentration" -hi: +hi: - levels: [0.05,0.1,0.25,0.5,0.75,1.0,1.5,2.0,2.5,3.0,3.5,4.0,4.5,5.0] - title: "Sea Ice Thickness (m)" hs: - levels: [0.01,0.03,0.05,0.07,0.10,0.13,0.15,0.20,0.25,0.30,0.35,0.40,0.45,0.50] - title: "Snow Depth (m)" -Tsfc: +Tsfc: - levels: [-40.,-37.,-34.,-31.,-28.,-25.,-22.,-19.,-16.,-13.,-10.,-5.,-3.,-1.] - title: "Surface Temperature (C)" -albsni: +albsni: - levels: [5,10,15,20,30,40,50, 60, 65, 70, 75, 80,85, 90] - title: "Snow Ice Albedo" -flat: +flat: - levels: [-18.,-16.,-14.,-12.,-10.,-8.,-6.,-5.,-4.,-3.,-2.,-1.,0.,2.] 
- title: "Latent Heat Flux (W/m^2}" -fsens: +fsens: - levels: [-30.,-25.,-20.,-15.,-10.,-5.,-2.5,0,2.5,5,10,15,20,25] - title: "Sensible Heat Flux (W/m^2)" congel: diff --git a/examples/nblibrary/ice/plot_diff.py b/examples/nblibrary/ice/plot_diff.py index 4eed1dc2..d6395dec 100644 --- a/examples/nblibrary/ice/plot_diff.py +++ b/examples/nblibrary/ice/plot_diff.py @@ -1,92 +1,100 @@ +from __future__ import annotations -import numpy as np +import cartopy.crs as ccrs +import cartopy.feature as cfeature import matplotlib as mpl -import matplotlib.pyplot as plt import matplotlib.path as mpath +import matplotlib.pyplot as plt +import numpy as np from matplotlib.gridspec import GridSpec -import cartopy.crs as ccrs -import cartopy.feature as cfeature -def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): - # make circular boundary for polar stereographic circular plots - theta = np.linspace(0, 2*np.pi, 100) - center, radius = [0.5, 0.5], 0.5 - verts = np.vstack([np.sin(theta), np.cos(theta)]).T - circle = mpath.Path(verts * radius + center) - - - if (np.size(levels) > 2): - cmap = mpl.colormaps['tab20'] - norm = mpl.colors.BoundaryNorm(levels, ncolors=cmap.N) - - # set up the figure with a North Polar Stereographic projection - fig = plt.figure(tight_layout=True) - gs = GridSpec(2, 4) - - if (proj == "N"): - ax = fig.add_subplot(gs[0,:2], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[0,:2], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - - ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - field_diff = field2.values-field1.values - field_std = field_diff.std() - - this=ax.pcolormesh(TLON, - TLAT, - field1, - norm = norm, - cmap="tab20", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case1,fontsize=10) - - if (proj == "N"): - ax = fig.add_subplot(gs[0,2:], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[0,2:], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - - ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - field2, - norm=norm, - cmap="tab20", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case2,fontsize=10) - - if (proj == "N"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - - ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - field_diff, - cmap="seismic",vmax=field_std*2.0,vmin=-field_std*2.0, - transform=ccrs.PlateCarree()) - 
plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case2+"-"+case1,fontsize=10) - - plt.suptitle(title) +def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): + # make circular boundary for polar stereographic circular plots + theta = np.linspace(0, 2 * np.pi, 100) + center, radius = [0.5, 0.5], 0.5 + verts = np.vstack([np.sin(theta), np.cos(theta)]).T + circle = mpath.Path(verts * radius + center) + + if np.size(levels) > 2: + cmap = mpl.colormaps["tab20"] + norm = mpl.colors.BoundaryNorm(levels, ncolors=cmap.N) + + # set up the figure with a North Polar Stereographic projection + fig = plt.figure(tight_layout=True) + gs = GridSpec(2, 4) + + if proj == "N": + ax = fig.add_subplot(gs[0, :2], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[0, :2], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + + ax.set_boundary(circle, transform=ax.transAxes) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + field_diff = field2.values - field1.values + field_std = field_diff.std() + + this = ax.pcolormesh( + TLON, + TLAT, + field1, + norm=norm, + cmap="tab20", + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case1, fontsize=10) + + if proj == "N": + ax = fig.add_subplot(gs[0, 2:], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[0, 2:], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + + ax.set_boundary(circle, transform=ax.transAxes) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + this = ax.pcolormesh( + TLON, + TLAT, + field2, + norm=norm, + cmap="tab20", + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case2, fontsize=10) + + if proj == "N": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + + ax.set_boundary(circle, transform=ax.transAxes) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + this = ax.pcolormesh( + TLON, + TLAT, + field_diff, + cmap="seismic", + vmax=field_std * 2.0, + vmin=-field_std * 2.0, + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case2 + "-" + case1, fontsize=10) + + plt.suptitle(title) diff --git a/examples/nblibrary/ice/seaice.ipynb b/examples/nblibrary/ice/seaice.ipynb index 658a1274..077cfd56 100644 --- a/examples/nblibrary/ice/seaice.ipynb +++ b/examples/nblibrary/ice/seaice.ipynb @@ -45,8 +45,11 @@ "outputs": [], "source": [ "CESM_output_dir = \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing\"\n", - "serial = False # use dask LocalCluster\n", - "cases = 
[\"g.e23_a16g.GJRAv4.TL319_t232_zstar_N65.2024.004\",\"g.e23_a16g.GJRAv4.TL319_t232_hycom1_N75.2024.005\"]\n", + "serial = False # use dask LocalCluster\n", + "cases = [\n", + " \"g.e23_a16g.GJRAv4.TL319_t232_zstar_N65.2024.004\",\n", + " \"g.e23_a16g.GJRAv4.TL319_t232_hycom1_N75.2024.005\",\n", + "]\n", "\n", "lc_kwargs = {}\n", "\n", @@ -65,10 +68,10 @@ "outputs": [], "source": [ "# Spin up cluster (if running in parallel)\n", - "client=None\n", + "client = None\n", "if not serial:\n", - " cluster = LocalCluster(**lc_kwargs)\n", - " client = Client(cluster)\n", + " cluster = LocalCluster(**lc_kwargs)\n", + " client = Client(cluster)\n", "\n", "client" ] @@ -90,42 +93,72 @@ "cbegyr2 = f\"{begyr2:04d}\"\n", "cendyr2 = f\"{endyr2:04d}\"\n", "\n", - "ds1 = xr.open_mfdataset(CESM_output_dir+\"/\"+case1+\"/ts/\"+case1+\".cice.h.\"+\"*.\"+cbegyr1+\"01-\"+cendyr1+\"12.nc\",\n", - " data_vars='minimal', compat='override', coords='minimal')\n", - "ds2 = xr.open_mfdataset(CESM_output_dir+\"/\"+case2+\"/ts/\"+case2+\".cice.h.\"+\"*.\"+cbegyr2+\"01-\"+cendyr2+\"12.nc\",\n", - " data_vars='minimal', compat='override', coords='minimal')\n", - "\n", - "TLAT = ds1['TLAT']\n", - "TLON = ds1['TLON']\n", - "tarea = ds1['tarea']\n", + "ds1 = xr.open_mfdataset(\n", + " CESM_output_dir\n", + " + \"/\"\n", + " + case1\n", + " + \"/ts/\"\n", + " + case1\n", + " + \".cice.h.\"\n", + " + \"*.\"\n", + " + cbegyr1\n", + " + \"01-\"\n", + " + cendyr1\n", + " + \"12.nc\",\n", + " data_vars=\"minimal\",\n", + " compat=\"override\",\n", + " coords=\"minimal\",\n", + ")\n", + "ds2 = xr.open_mfdataset(\n", + " CESM_output_dir\n", + " + \"/\"\n", + " + case2\n", + " + \"/ts/\"\n", + " + case2\n", + " + \".cice.h.\"\n", + " + \"*.\"\n", + " + cbegyr2\n", + " + \"01-\"\n", + " + cendyr2\n", + " + \"12.nc\",\n", + " data_vars=\"minimal\",\n", + " compat=\"override\",\n", + " coords=\"minimal\",\n", + ")\n", + "\n", + "TLAT = ds1[\"TLAT\"]\n", + "TLON = ds1[\"TLON\"]\n", + "tarea = ds1[\"tarea\"]\n", "\n", "# Make a DataArray with the number of days in each month, size = len(time)\n", "month_length = ds1.time.dt.days_in_month\n", - "weights_monthly = month_length.groupby(\"time.year\") / month_length.groupby(\"time.year\").sum()\n", + "weights_monthly = (\n", + " month_length.groupby(\"time.year\") / month_length.groupby(\"time.year\").sum()\n", + ")\n", "\n", "\n", - "#seasons = xr.full_like(months, fill_value=\"none\", dtype=\"U4\")\n", - "#seasons.name = \"season\"\n", - "#seasons[months.isin([1, 2, 3])] = \"JFM\"\n", - "#seasons[months.isin([4, 5, 6])] = \"AMJ\"\n", - "#seasons[months.isin([7, 8, 9])] = \"JAS\"\n", - "#seasons[months.isin([10, 11, 12])] = \"OND\"\n", - "#weights_season = month_length.groupby(seasons) / month_length.groupby(seasons).sum()\n", + "# seasons = xr.full_like(months, fill_value=\"none\", dtype=\"U4\")\n", + "# seasons.name = \"season\"\n", + "# seasons[months.isin([1, 2, 3])] = \"JFM\"\n", + "# seasons[months.isin([4, 5, 6])] = \"AMJ\"\n", + "# seasons[months.isin([7, 8, 9])] = \"JAS\"\n", + "# seasons[months.isin([10, 11, 12])] = \"OND\"\n", + "# weights_season = month_length.groupby(seasons) / month_length.groupby(seasons).sum()\n", "\n", "ds1_ann = (ds1 * weights_monthly).resample(time=\"YS\").sum(dim=\"time\")\n", "ds2_ann = (ds2 * weights_monthly).resample(time=\"YS\").sum(dim=\"time\")\n", "\n", "\n", - "#ds1_seas = (ds1 * weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", - "#ds2_seas = (ds2 * weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", + "# ds1_seas = (ds1 
* weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", + "# ds2_seas = (ds2 * weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", "\n", - "with open('cice_masks.yml', 'r') as file:\n", + "with open(\"cice_masks.yml\", \"r\") as file:\n", " cice_masks = yaml.safe_load(file)\n", "\n", - "with open('cice_vars.yml', 'r') as file:\n", + "with open(\"cice_vars.yml\", \"r\") as file:\n", " cice_vars = yaml.safe_load(file)\n", "\n", - "print(ds1['aice'])\n" + "print(ds1[\"aice\"])" ] }, { @@ -147,13 +180,13 @@ "outputs": [], "source": [ "for var in cice_vars:\n", - " vmin=cice_vars[var][0]['levels'][0]\n", - " vmax=cice_vars[var][0]['levels'][-1]\n", - " levels = np.array(cice_vars[var][0]['levels'])\n", - " title=cice_vars[var][1]['title']\n", - " field1 = ds1_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " field2 = ds2_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " plot_diff(field1,field2,levels,case1,case2,title,\"N\",TLAT,TLON)" + " vmin = cice_vars[var][0][\"levels\"][0]\n", + " vmax = cice_vars[var][0][\"levels\"][-1]\n", + " levels = np.array(cice_vars[var][0][\"levels\"])\n", + " title = cice_vars[var][1][\"title\"]\n", + " field1 = ds1_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " field2 = ds2_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " plot_diff(field1, field2, levels, case1, case2, title, \"N\", TLAT, TLON)" ] }, { @@ -164,13 +197,13 @@ "outputs": [], "source": [ "for var in cice_vars:\n", - " vmin=cice_vars[var][0]['levels'][0]\n", - " vmax=cice_vars[var][0]['levels'][1]\n", - " levels = np.array(cice_vars[var][0]['levels'])\n", - " title=cice_vars[var][1]['title']\n", - " field1 = ds1_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " field2 = ds2_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " plot_diff(field1,field2,levels,case1,case2,title,\"S\",TLAT,TLON)" + " vmin = cice_vars[var][0][\"levels\"][0]\n", + " vmax = cice_vars[var][0][\"levels\"][1]\n", + " levels = np.array(cice_vars[var][0][\"levels\"])\n", + " title = cice_vars[var][1][\"title\"]\n", + " field1 = ds1_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " field2 = ds2_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " plot_diff(field1, field2, levels, case1, case2, title, \"S\", TLAT, TLON)" ] }, { @@ -180,43 +213,43 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area = (tarea*ds1.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area = (tarea*ds2.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area = (tarea * ds1.aice).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area = (tarea * ds2.aice).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", - "ds1_vhi = (tarea*ds1.hi).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhi = (tarea*ds2.hi).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhi = (tarea * ds1.hi).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhi = (tarea * ds2.hi).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "ds1_vhs = (tarea*ds1.hs).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhs = (tarea*ds2.hs).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhs = (tarea * ds1.hs).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhs = (tarea * ds2.hs).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "fig = plt.figure(figsize=(10,10),tight_layout=True)\n", + 
"fig = plt.figure(figsize=(10, 10), tight_layout=True)\n", "\n", - "ax = fig.add_subplot(3,1,1)\n", + "ax = fig.add_subplot(3, 1, 1)\n", "ds1_vhi.plot()\n", "ds2_vhi.plot()\n", "\n", - "plt.ylim((0,10))\n", + "plt.ylim((0, 10))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"NH Sea Ice Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,2)\n", + "ax = fig.add_subplot(3, 1, 2)\n", "ds1_vhs.plot()\n", "ds2_vhs.plot()\n", "\n", - "plt.ylim((0,1))\n", + "plt.ylim((0, 1))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"NH Snow Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,3)\n", + "ax = fig.add_subplot(3, 1, 3)\n", "ds1_area.plot()\n", "ds2_area.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"NH Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -226,43 +259,43 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area_ann = (tarea*ds1_ann['aice']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area_ann = (tarea*ds2_ann['aice']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area_ann = (tarea * ds1_ann[\"aice\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area_ann = (tarea * ds2_ann[\"aice\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", - "ds1_vhi_ann = (tarea*ds1_ann['hi']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhi_ann = (tarea*ds2_ann['hi']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhi_ann = (tarea * ds1_ann[\"hi\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhi_ann = (tarea * ds2_ann[\"hi\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "ds1_vhs_ann = (tarea*ds1_ann['hs']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhs_ann = (tarea*ds2_ann['hs']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhs_ann = (tarea * ds1_ann[\"hs\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhs_ann = (tarea * ds2_ann[\"hs\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "fig = plt.figure(figsize=(10,10),tight_layout=True)\n", + "fig = plt.figure(figsize=(10, 10), tight_layout=True)\n", "\n", - "ax = fig.add_subplot(3,1,1)\n", + "ax = fig.add_subplot(3, 1, 1)\n", "ds1_vhi_ann.plot()\n", "ds2_vhi_ann.plot()\n", "\n", - "plt.ylim((0,10))\n", + "plt.ylim((0, 10))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"NH Annual Mean Sea Ice Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,2)\n", + "ax = fig.add_subplot(3, 1, 2)\n", "ds1_vhs_ann.plot()\n", "ds2_vhs_ann.plot()\n", "\n", - "plt.ylim((0,1))\n", + "plt.ylim((0, 1))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"NH Annual Mean Snow Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,3)\n", + "ax = fig.add_subplot(3, 1, 3)\n", "ds1_area_ann.plot()\n", "ds2_area_ann.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"NH Annual Mean Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -286,16 +319,16 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area = (tarea*ds1.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area = 
(tarea*ds2.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area = (tarea * ds1.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area = (tarea * ds2.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_area.plot()\n", "ds2_area.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"SH Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -305,16 +338,16 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area_ann = (tarea*ds1_ann.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area_ann = (tarea*ds2_ann.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area_ann = (tarea * ds1_ann.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area_ann = (tarea * ds2_ann.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_area_ann.plot()\n", "ds2_area_ann.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"SH Annual Mean Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -328,51 +361,62 @@ "source": [ "### Read in the NSIDC data from files\n", "\n", - "path_nsidc = '/glade/campaign/cesm/development/pcwg/ice/data/NSIDC_SeaIce_extent/'\n", - "\n", - "jan_nsidc = pd.read_csv(path_nsidc+'N_01_extent_v3.0.csv',na_values=['-99.9'])\n", - "feb_nsidc = pd.read_csv(path_nsidc+'N_02_extent_v3.0.csv',na_values=['-99.9'])\n", - "mar_nsidc = pd.read_csv(path_nsidc+'N_03_extent_v3.0.csv',na_values=['-99.9'])\n", - "apr_nsidc = pd.read_csv(path_nsidc+'N_04_extent_v3.0.csv',na_values=['-99.9'])\n", - "may_nsidc = pd.read_csv(path_nsidc+'N_05_extent_v3.0.csv',na_values=['-99.9'])\n", - "jun_nsidc = pd.read_csv(path_nsidc+'N_06_extent_v3.0.csv',na_values=['-99.9'])\n", - "jul_nsidc = pd.read_csv(path_nsidc+'N_07_extent_v3.0.csv',na_values=['-99.9'])\n", - "aug_nsidc = pd.read_csv(path_nsidc+'N_08_extent_v3.0.csv',na_values=['-99.9'])\n", - "sep_nsidc = pd.read_csv(path_nsidc+'N_09_extent_v3.0.csv',na_values=['-99.9'])\n", - "oct_nsidc = pd.read_csv(path_nsidc+'N_10_extent_v3.0.csv',na_values=['-99.9'])\n", - "nov_nsidc = pd.read_csv(path_nsidc+'N_11_extent_v3.0.csv',na_values=['-99.9'])\n", - "dec_nsidc = pd.read_csv(path_nsidc+'N_12_extent_v3.0.csv',na_values=['-99.9'])\n", - "\n", - "jan_area = jan_nsidc.iloc[:,5].values\n", - "feb_area = feb_nsidc.iloc[:,5].values\n", - "mar_area = mar_nsidc.iloc[:,5].values\n", - "apr_area = apr_nsidc.iloc[:,5].values\n", - "may_area = may_nsidc.iloc[:,5].values\n", - "jun_area = jun_nsidc.iloc[:,5].values\n", - "jul_area = jul_nsidc.iloc[:,5].values\n", - "aug_area = aug_nsidc.iloc[:,5].values\n", - "sep_area = sep_nsidc.iloc[:,5].values\n", - "oct_area = oct_nsidc.iloc[:,5].values\n", - "nov_area = nov_nsidc.iloc[:,5].values\n", - "dec_area = dec_nsidc.iloc[:,5].values\n", - "\n", - "jan_ext = jan_nsidc.iloc[:,4].values\n", - "feb_ext = feb_nsidc.iloc[:,4].values\n", - "mar_ext = mar_nsidc.iloc[:,4].values\n", - "apr_ext = apr_nsidc.iloc[:,4].values\n", - "may_ext = may_nsidc.iloc[:,4].values\n", - "jun_ext = jun_nsidc.iloc[:,4].values\n", - "jul_ext = jul_nsidc.iloc[:,4].values\n", - "aug_ext = aug_nsidc.iloc[:,4].values\n", - "sep_ext = sep_nsidc.iloc[:,4].values\n", - "oct_ext = oct_nsidc.iloc[:,4].values\n", - "nov_ext = nov_nsidc.iloc[:,4].values\n", - "dec_ext = dec_nsidc.iloc[:,4].values\n", + "path_nsidc = 
\"/glade/campaign/cesm/development/pcwg/ice/data/NSIDC_SeaIce_extent/\"\n", + "\n", + "jan_nsidc = pd.read_csv(path_nsidc + \"N_01_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "feb_nsidc = pd.read_csv(path_nsidc + \"N_02_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "mar_nsidc = pd.read_csv(path_nsidc + \"N_03_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "apr_nsidc = pd.read_csv(path_nsidc + \"N_04_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "may_nsidc = pd.read_csv(path_nsidc + \"N_05_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "jun_nsidc = pd.read_csv(path_nsidc + \"N_06_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "jul_nsidc = pd.read_csv(path_nsidc + \"N_07_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "aug_nsidc = pd.read_csv(path_nsidc + \"N_08_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "sep_nsidc = pd.read_csv(path_nsidc + \"N_09_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "oct_nsidc = pd.read_csv(path_nsidc + \"N_10_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "nov_nsidc = pd.read_csv(path_nsidc + \"N_11_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "dec_nsidc = pd.read_csv(path_nsidc + \"N_12_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "\n", + "jan_area = jan_nsidc.iloc[:, 5].values\n", + "feb_area = feb_nsidc.iloc[:, 5].values\n", + "mar_area = mar_nsidc.iloc[:, 5].values\n", + "apr_area = apr_nsidc.iloc[:, 5].values\n", + "may_area = may_nsidc.iloc[:, 5].values\n", + "jun_area = jun_nsidc.iloc[:, 5].values\n", + "jul_area = jul_nsidc.iloc[:, 5].values\n", + "aug_area = aug_nsidc.iloc[:, 5].values\n", + "sep_area = sep_nsidc.iloc[:, 5].values\n", + "oct_area = oct_nsidc.iloc[:, 5].values\n", + "nov_area = nov_nsidc.iloc[:, 5].values\n", + "dec_area = dec_nsidc.iloc[:, 5].values\n", + "\n", + "jan_ext = jan_nsidc.iloc[:, 4].values\n", + "feb_ext = feb_nsidc.iloc[:, 4].values\n", + "mar_ext = mar_nsidc.iloc[:, 4].values\n", + "apr_ext = apr_nsidc.iloc[:, 4].values\n", + "may_ext = may_nsidc.iloc[:, 4].values\n", + "jun_ext = jun_nsidc.iloc[:, 4].values\n", + "jul_ext = jul_nsidc.iloc[:, 4].values\n", + "aug_ext = aug_nsidc.iloc[:, 4].values\n", + "sep_ext = sep_nsidc.iloc[:, 4].values\n", + "oct_ext = oct_nsidc.iloc[:, 4].values\n", + "nov_ext = nov_nsidc.iloc[:, 4].values\n", + "dec_ext = dec_nsidc.iloc[:, 4].values\n", "\n", "print(dec_ext)\n", - "nsidc_clim = [np.nanmean(jan_ext[0:35]),np.nanmean(feb_ext[0:35]),np.nanmean(mar_ext[0:35]),np.nanmean(apr_ext[0:35]),\n", - " np.nanmean(may_ext[0:35]),np.nanmean(jun_ext[0:35]),np.nanmean(jul_ext[0:35]),np.nanmean(aug_ext[0:35]),\n", - " np.nanmean(sep_ext[0:35]),np.nanmean(oct_ext[0:35]),np.nanmean(nov_ext[0:35]),np.nanmean(dec_ext[0:35])]\n", + "nsidc_clim = [\n", + " np.nanmean(jan_ext[0:35]),\n", + " np.nanmean(feb_ext[0:35]),\n", + " np.nanmean(mar_ext[0:35]),\n", + " np.nanmean(apr_ext[0:35]),\n", + " np.nanmean(may_ext[0:35]),\n", + " np.nanmean(jun_ext[0:35]),\n", + " np.nanmean(jul_ext[0:35]),\n", + " np.nanmean(aug_ext[0:35]),\n", + " np.nanmean(sep_ext[0:35]),\n", + " np.nanmean(oct_ext[0:35]),\n", + " np.nanmean(nov_ext[0:35]),\n", + " np.nanmean(dec_ext[0:35]),\n", + "]\n", "\n", "plt.plot(nsidc_clim)" ] @@ -384,28 +428,27 @@ "metadata": {}, "outputs": [], "source": [ + "aice1_month = ds1[\"aice\"].groupby(\"time.month\").mean(dim=\"time\", skipna=True)\n", + "aice2_month = ds2[\"aice\"].groupby(\"time.month\").mean(dim=\"time\", skipna=True)\n", "\n", - "aice1_month = ds1['aice'].groupby(\"time.month\").mean(dim=\"time\",skipna=True)\n", - "aice2_month = 
ds2['aice'].groupby(\"time.month\").mean(dim=\"time\",skipna=True)\n", + "mask_tmp1 = np.where(np.logical_and(aice1_month > 0.15, ds1[\"TLAT\"] > 0), 1.0, 0.0)\n", + "mask_tmp2 = np.where(np.logical_and(aice2_month > 0.15, ds1[\"TLAT\"] > 0), 1.0, 0.0)\n", "\n", - "mask_tmp1 = np.where(np.logical_and(aice1_month > 0.15, ds1['TLAT'] > 0), 1., 0.)\n", - "mask_tmp2 = np.where(np.logical_and(aice2_month > 0.15, ds1['TLAT'] > 0), 1., 0.)\n", + "mask_ext1 = xr.DataArray(data=mask_tmp1, dims=[\"month\", \"nj\", \"ni\"])\n", + "mask_ext2 = xr.DataArray(data=mask_tmp2, dims=[\"month\", \"nj\", \"ni\"])\n", "\n", - "mask_ext1 = xr.DataArray(data=mask_tmp1,dims=[\"month\",\"nj\", \"ni\"])\n", - "mask_ext2 = xr.DataArray(data=mask_tmp2,dims=[\"month\",\"nj\", \"ni\"])\n", "\n", - "\n", - "ext1 = (mask_ext1*tarea).sum(['ni','nj'])*1.0e-12\n", - "ext2 = (mask_ext2*tarea).sum(['ni','nj'])*1.0e-12\n", + "ext1 = (mask_ext1 * tarea).sum([\"ni\", \"nj\"]) * 1.0e-12\n", + "ext2 = (mask_ext2 * tarea).sum([\"ni\", \"nj\"]) * 1.0e-12\n", "\n", "plt.plot(ext1)\n", "plt.plot(ext2)\n", "plt.plot(nsidc_clim)\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"Climatological Seasonal Cycle Ice Extent $m x 10^{12}$\")\n", - "plt.legend([case1,case2,\"NSIDC\"])\n" + "plt.legend([case1, case2, \"NSIDC\"])" ] }, { @@ -415,8 +458,8 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area = (tarea*ds1.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area = (tarea*ds2.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area = (tarea * ds1.aice).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area = (tarea * ds2.aice).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_sep = ds1_area.sel(time=(ds1_area.time.dt.month == 9))\n", "ds2_sep = ds2_area.sel(time=(ds2_area.time.dt.month == 9))\n", @@ -425,10 +468,10 @@ "plt.plot(ds2_sep)\n", "plt.plot(sep_area)\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"Sea Ice Area $mx10^{12}$\")\n", - "plt.legend([case1,case2,\"NSIDC\"])" + "plt.legend([case1, case2, \"NSIDC\"])" ] }, { @@ -438,26 +481,25 @@ "metadata": {}, "outputs": [], "source": [ + "latm = cice_masks[\"Lab_lat\"]\n", + "lonm = cice_masks[\"Lab_lon\"]\n", "\n", - "latm = cice_masks['Lab_lat']\n", - "lonm = cice_masks['Lab_lon']\n", - "\n", - "lon = np.where(TLON < 0, TLON+360.,TLON)\n", + "lon = np.where(TLON < 0, TLON + 360.0, TLON)\n", "\n", - "mask1 = np.where(np.logical_and(TLAT > latm[0], TLAT < latm[1]),1.,0.)\n", - "mask2 = np.where(np.logical_or(lon > lonm[0], lon < lonm[1]),1.,0.)\n", - "mask = mask1*mask2\n", + "mask1 = np.where(np.logical_and(TLAT > latm[0], TLAT < latm[1]), 1.0, 0.0)\n", + "mask2 = np.where(np.logical_or(lon > lonm[0], lon < lonm[1]), 1.0, 0.0)\n", + "mask = mask1 * mask2\n", "\n", - "ds1_lab = (mask*tarea*ds1.aice).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_lab = (mask*tarea*ds2.aice).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_lab = (mask * tarea * ds1.aice).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_lab = (mask * tarea * ds2.aice).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_lab.plot()\n", "ds2_lab.plot()\n", "\n", - "plt.ylim((0,10))\n", + "plt.ylim((0, 10))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"Labrador Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -467,14 +509,16 @@ "metadata": {}, "outputs": [], "source": [ - "uvel1 = 
ds1_ann['uvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "vvel1 = ds1_ann['vvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "uvel2 = ds2_ann['uvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "vvel2 = ds2_ann['vvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "ds_angle = xr.open_dataset(\"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/angle_tx2_3v2.nc\")\n", - "angle = ds_angle['ANGLE']\n", - "\n", - "vect_diff(uvel1,vvel1,uvel2,vvel2,angle,\"N\",case1,case2,TLAT,TLON)" + "uvel1 = ds1_ann[\"uvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "vvel1 = ds1_ann[\"vvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "uvel2 = ds2_ann[\"uvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "vvel2 = ds2_ann[\"vvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "ds_angle = xr.open_dataset(\n", + " \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/angle_tx2_3v2.nc\"\n", + ")\n", + "angle = ds_angle[\"ANGLE\"]\n", + "\n", + "vect_diff(uvel1, vvel1, uvel2, vvel2, angle, \"N\", case1, case2, TLAT, TLON)" ] }, { @@ -484,7 +528,7 @@ "metadata": {}, "outputs": [], "source": [ - "vect_diff(uvel1,vvel1,uvel2,vvel2,angle,\"S\",case1,case2,TLAT,TLON)" + "vect_diff(uvel1, vvel1, uvel2, vvel2, angle, \"S\", case1, case2, TLAT, TLON)" ] }, { diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index 4bf9c8f8..80ca09e3 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -1,27 +1,28 @@ +from __future__ import annotations -import numpy as np -import matplotlib as mpl -import matplotlib.pyplot as plt -import matplotlib.path as mpath -from matplotlib.gridspec import GridSpec import cartopy.crs as ccrs import cartopy.feature as cfeature +import matplotlib.path as mpath +import matplotlib.pyplot as plt +import numpy as np +from matplotlib.gridspec import GridSpec + + +def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON): + uvel_rot1 = uvel1 * np.cos(angle) - vvel1 * np.sin(angle) + vvel_rot1 = uvel1 * np.sin(angle) + vvel1 * np.cos(angle) + uvel_rot2 = uvel2 * np.cos(angle) - vvel2 * np.sin(angle) + vvel_rot2 = uvel2 * np.sin(angle) + vvel2 * np.cos(angle) + + speed1 = np.sqrt(uvel1 * uvel1 + vvel1 * vvel1) + speed2 = np.sqrt(uvel2 * uvel2 + vvel2 * vvel2) + + uvel_diff = uvel_rot2 - uvel_rot1 + vvel_diff = vvel_rot2 - vvel_rot1 + speed_diff = speed2 - speed1 -def vect_diff(uvel1,vvel1,uvel2,vvel2,angle,proj,case1,case2,TLAT,TLON): - uvel_rot1 = uvel1*np.cos(angle)-vvel1*np.sin(angle) - vvel_rot1 = uvel1*np.sin(angle)+vvel1*np.cos(angle) - uvel_rot2 = uvel2*np.cos(angle)-vvel2*np.sin(angle) - vvel_rot2 = uvel2*np.sin(angle)+vvel2*np.cos(angle) - - speed1 = np.sqrt(uvel1*uvel1+vvel1*vvel1) - speed2 = np.sqrt(uvel2*uvel2+vvel2*vvel2) - - uvel_diff = uvel_rot2-uvel_rot1 - vvel_diff = vvel_rot2-vvel_rot1 - speed_diff = speed2-speed1 - # make circular boundary for polar stereographic circular plots - theta = np.linspace(0, 2*np.pi, 100) + theta = np.linspace(0, 2 * np.pi, 100) center, radius = [0.5, 0.5], 0.5 verts = np.vstack([np.sin(theta), np.cos(theta)]).T circle = mpath.Path(verts * radius + center) @@ -30,97 +31,148 @@ def vect_diff(uvel1,vvel1,uvel2,vvel2,angle,proj,case1,case2,TLAT,TLON): fig = plt.figure(tight_layout=True) gs = GridSpec(2, 4) - if (proj == "N"): - ax = fig.add_subplot(gs[0,:2], projection=ccrs.NorthPolarStereo()) - 
# sets the latitude / longitude boundaries of the plot
-        ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree())
-    if (proj == "S"):
-        ax = fig.add_subplot(gs[0,:2], projection=ccrs.SouthPolarStereo())
-        # sets the latitude / longitude boundaries of the plot
-        ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree())
+    if proj == "N":
+        ax = fig.add_subplot(gs[0, :2], projection=ccrs.NorthPolarStereo())
+        # sets the latitude / longitude boundaries of the plot
+        ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree())
+    if proj == "S":
+        ax = fig.add_subplot(gs[0, :2], projection=ccrs.SouthPolarStereo())
+        # sets the latitude / longitude boundaries of the plot
+        ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree())
 
     ax.set_boundary(circle, transform=ax.transAxes)
-    ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k')
-
-    this=ax.pcolormesh(TLON,
-                       TLAT,
-                       speed1,
-                       vmin = 0.,
-                       vmax = 0.5,
-                       cmap="tab20",
-                       transform=ccrs.PlateCarree())
-    plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01)
-    plt.title(case1,fontsize=10)
+    ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k")
+
+    this = ax.pcolormesh(
+        TLON,
+        TLAT,
+        speed1,
+        vmin=0.0,
+        vmax=0.5,
+        cmap="tab20",
+        transform=ccrs.PlateCarree(),
+    )
+    plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01)
+    plt.title(case1, fontsize=10)
 
     intv = 5
-    ## add vectors
-    Q = ax.quiver(TLON[::intv,::intv].values,TLAT[::intv,::intv].values,
-                  uvel_rot1[::intv,::intv].values,vvel_rot1[::intv,::intv].values,
-                  color = 'black', scale=1.,
-                  transform=ccrs.PlateCarree())
+    # add vectors
+    Q = ax.quiver(
+        TLON[::intv, ::intv].values,
+        TLAT[::intv, ::intv].values,
+        uvel_rot1[::intv, ::intv].values,
+        vvel_rot1[::intv, ::intv].values,
+        color="black",
+        scale=1.0,
+        transform=ccrs.PlateCarree(),
+    )
     units = "cm/s"
-    qk = ax.quiverkey(Q,0.85,0.025,0.10,r'10 '+units,labelpos='S', coordinates='axes',color='black',zorder=2)
-
-    if (proj == "N"):
-        ax = fig.add_subplot(gs[0,2:], projection=ccrs.NorthPolarStereo())
-        # sets the latitude / longitude boundaries of the plot
-        ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree())
-    if (proj == "S"):
-        ax = fig.add_subplot(gs[0,2:], projection=ccrs.SouthPolarStereo())
-        # sets the latitude / longitude boundaries of the plot
-        ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree())
-
+    ax.quiverkey(
+        Q,
+        0.85,
+        0.025,
+        0.10,
+        r"10 " + units,
+        labelpos="S",
+        coordinates="axes",
+        color="black",
+        zorder=2,
+    )
+
+    if proj == "N":
+        ax = fig.add_subplot(gs[0, 2:], projection=ccrs.NorthPolarStereo())
+        # sets the latitude / longitude boundaries of the plot
+        ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree())
+    if proj == "S":
+        ax = fig.add_subplot(gs[0, 2:], projection=ccrs.SouthPolarStereo())
+        # sets the latitude / longitude boundaries of the plot
+        ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree())
+
     ax.set_boundary(circle, transform=ax.transAxes)
-    ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k')
-
-    this=ax.pcolormesh(TLON,
-                       TLAT,
-                       speed2,
-                       vmin = 0.,
-                       vmax = 0.5,
-                       cmap="tab20",
-                       transform=ccrs.PlateCarree())
-    plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01)
-    plt.title(case1,fontsize=10)
+    ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k")
+
+    this = ax.pcolormesh(
+        TLON,
+        TLAT,
+        speed2,
+        vmin=0.0,
+        vmax=0.5,
+        cmap="tab20",
+        transform=ccrs.PlateCarree(),
+    )
+    plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01)
+    plt.title(case2, fontsize=10)
 
     intv = 5
-    ## add vectors
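# Aside: the heart of vect_diff() is the rotation at the top of the function,
# which maps grid-relative (uvel, vvel) onto zonal/meridional components via
# the grid angle before quiver-plotting. A minimal sketch of the same rotation,
# with made-up values standing in for the model's ANGLE field:
import numpy as np

angle_r = np.deg2rad(30.0)  # hypothetical grid rotation angle (radians)
uvel, vvel = 0.10, 0.05  # hypothetical grid-relative velocity components

u_east = uvel * np.cos(angle_r) - vvel * np.sin(angle_r)
v_north = uvel * np.sin(angle_r) + vvel * np.cos(angle_r)

# a pure rotation preserves magnitude, so the speed is unchanged
assert np.isclose(np.hypot(uvel, vvel), np.hypot(u_east, v_north))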
- Q = ax.quiver(TLON[::intv,::intv].values,TLAT[::intv,::intv].values, - uvel_rot2[::intv,::intv].values,vvel_rot2[::intv,::intv].values, - color = 'black', scale=1., - transform=ccrs.PlateCarree()) + # add vectors + Q = ax.quiver( + TLON[::intv, ::intv].values, + TLAT[::intv, ::intv].values, + uvel_rot2[::intv, ::intv].values, + vvel_rot2[::intv, ::intv].values, + color="black", + scale=1.0, + transform=ccrs.PlateCarree(), + ) units = "cm/s" - qk = ax.quiverkey(Q,0.85,0.025,0.10,r'10 '+units,labelpos='S', coordinates='axes',color='black',zorder=2) - - if (proj == "N"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - + ax.quiverkey( + Q, + 0.85, + 0.025, + 0.10, + r"10 " + units, + labelpos="S", + coordinates="axes", + color="black", + zorder=2, + ) + + if proj == "N": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - speed_diff, - vmin = -0.2, - vmax = 0.2, - cmap="seismic", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case2+"-"+case1,fontsize=10) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + this = ax.pcolormesh( + TLON, + TLAT, + speed_diff, + vmin=-0.2, + vmax=0.2, + cmap="seismic", + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case2 + "-" + case1, fontsize=10) intv = 5 - ## add vectors - Q = ax.quiver(TLON[::intv,::intv].values,TLAT[::intv,::intv].values, - uvel_diff[::intv,::intv].values,vvel_diff[::intv,::intv].values, - color = 'black', scale=1., - transform=ccrs.PlateCarree()) + # add vectors + Q = ax.quiver( + TLON[::intv, ::intv].values, + TLAT[::intv, ::intv].values, + uvel_diff[::intv, ::intv].values, + vvel_diff[::intv, ::intv].values, + color="black", + scale=1.0, + transform=ccrs.PlateCarree(), + ) units = "cm/s" - qk = ax.quiverkey(Q,0.85,0.025,0.10,r'10 '+units,labelpos='S', coordinates='axes',color='black',zorder=2) + ax.quiverkey( + Q, + 0.85, + 0.025, + 0.10, + r"10 " + units, + labelpos="S", + coordinates="axes", + color="black", + zorder=2, + ) plt.suptitle("Velocity m/s") diff --git a/examples/nblibrary/lnd/land_comparison.ipynb b/examples/nblibrary/lnd/land_comparison.ipynb index af87618e..e32dca59 100644 --- a/examples/nblibrary/lnd/land_comparison.ipynb +++ b/examples/nblibrary/lnd/land_comparison.ipynb @@ -48,8 +48,11 @@ "outputs": [], "source": [ "CESM_output_dir = \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing\"\n", - "type = ['1850pAD','1850pSASU']\n", - "cases = ['ctsm51d159_f45_GSWP3_bgccrop_1850pAD', 'ctsm51d159_f45_GSWP3_bgccrop_1850pSASU']" + "type = [\"1850pAD\", \"1850pSASU\"]\n", + "cases = [\n", + " 
\"ctsm51d159_f45_GSWP3_bgccrop_1850pAD\",\n", + " \"ctsm51d159_f45_GSWP3_bgccrop_1850pSASU\",\n", + "]" ] }, { @@ -61,13 +64,11 @@ "source": [ "# -- read only these variables from the whole netcdf files\n", "# average over time\n", - "def preprocess (ds):\n", - " variables = ['TOTECOSYSC', 'TOTVEGC','TOTSOMC',\n", - " 'SOM_PAS_C_vr']\n", + "def preprocess(ds):\n", + " variables = [\"TOTECOSYSC\", \"TOTVEGC\", \"TOTSOMC\", \"SOM_PAS_C_vr\"]\n", "\n", - " ds_new= ds[variables]\n", - " return ds_new\n", - "\n" + " ds_new = ds[variables]\n", + " return ds_new" ] }, { @@ -78,27 +79,31 @@ "outputs": [], "source": [ "for c in range(len(cases)):\n", - "\n", - " sim_files =[]\n", + " sim_files = []\n", " sim_path = f\"{CESM_output_dir}/{cases[c]}/lnd/hist\"\n", " sim_files.extend(sorted(glob(join(f\"{sim_path}/{cases[c]}.clm2.h0.*.nc\"))))\n", - " # subset last 5 years of data \n", + " # subset last 5 years of data\n", " sim_files = sim_files[-60:None]\n", " print(f\"All simulation files for {cases[c]}: [{len(sim_files)} files]\")\n", "\n", - " temp = xr.open_mfdataset(sim_files, decode_times=True, combine='by_coords',\n", - " parallel=False, preprocess=preprocess).mean('time')\n", - " \n", + " temp = xr.open_mfdataset(\n", + " sim_files,\n", + " decode_times=True,\n", + " combine=\"by_coords\",\n", + " parallel=False,\n", + " preprocess=preprocess,\n", + " ).mean(\"time\")\n", + "\n", " if c == 0:\n", " ds = temp\n", " else:\n", - " ds = xr.concat([ds, temp],'case')\n", - " \n", + " ds = xr.concat([ds, temp], \"case\")\n", + "\n", "ds = ds.assign_coords({\"case\": type})\n", "\n", "# Calculate differences\n", - "diff = ds.isel(case=1) - ds.isel(case=0) \n", - "rel_diff = diff / ds.isel(case=1) " + "diff = ds.isel(case=1) - ds.isel(case=0)\n", + "rel_diff = diff / ds.isel(case=1)" ] }, { @@ -116,24 +121,24 @@ "metadata": {}, "outputs": [], "source": [ - "plt.figure(figsize=[14,14])\n", - "var = ['TOTECOSYSC' ,'TOTVEGC','TOTSOMC']\n", - "#var = ['GPP' ,'ELAI','ALT']\n", + "plt.figure(figsize=[14, 14])\n", + "var = [\"TOTECOSYSC\", \"TOTVEGC\", \"TOTSOMC\"]\n", + "# var = ['GPP' ,'ELAI','ALT']\n", "i = 1\n", "for v in range(len(var)):\n", " plt.subplot(3, 2, i)\n", - " ds[var[v]].isel(case=1).plot(robust=True) \n", - " plt.title(\"pSASU \"+ var[v])\n", + " ds[var[v]].isel(case=1).plot(robust=True)\n", + " plt.title(\"pSASU \" + var[v])\n", " plt.xlabel(None)\n", " plt.ylabel(None)\n", - " i = i+1\n", - " \n", + " i = i + 1\n", + "\n", " plt.subplot(3, 2, i)\n", - " diff[var[v]].plot(robust=True) \n", - " plt.title(\"pSASU - pAD \"+ var[v]) \n", + " diff[var[v]].plot(robust=True)\n", + " plt.title(\"pSASU - pAD \" + var[v])\n", " plt.xlabel(None)\n", - " plt.ylabel(None) \n", - " i = i+1\n" + " plt.ylabel(None)\n", + " i = i + 1" ] }, { @@ -152,40 +157,40 @@ "metadata": {}, "outputs": [], "source": [ - "plt.figure(figsize=[10,5])\n", - "var = 'SOM_PAS_C_vr'\n", + "plt.figure(figsize=[10, 5])\n", + "var = \"SOM_PAS_C_vr\"\n", "plt.subplot(1, 4, 1)\n", - "ds[var].sel(lon=300,lat=-10, method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Tropical')\n", - "plt.ylabel('depth (m)')\n", - "plt.xscale('log',base=10) \n", - "#plt.ylim(6,0)\n", + "ds[var].sel(lon=300, lat=-10, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Tropical\")\n", + "plt.ylabel(\"depth (m)\")\n", + "plt.xscale(\"log\", base=10)\n", + "# plt.ylim(6,0)\n", "\n", "\n", "plt.subplot(1, 4, 2)\n", - "ds[var].sel(lon=25,lat=50, 
method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Temperate')\n", + "ds[var].sel(lon=25, lat=50, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Temperate\")\n", "plt.ylabel(None)\n", - "plt.xscale('log',base=10) \n", - "#plt.ylim(6,0)\n", + "plt.xscale(\"log\", base=10)\n", + "# plt.ylim(6,0)\n", "\n", "plt.subplot(1, 4, 3)\n", - "ds[var].sel(lon=155,lat=66, method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Arctic')\n", + "ds[var].sel(lon=155, lat=66, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Arctic\")\n", "plt.ylabel(None)\n", - "plt.xscale('log',base=10) \n", - "#plt.ylim(6,0)\n", + "plt.xscale(\"log\", base=10)\n", + "# plt.ylim(6,0)\n", "\n", "\n", "plt.subplot(1, 4, 4)\n", - "ds[var].sel(lon=155,lat=66, method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Arctic')\n", + "ds[var].sel(lon=155, lat=66, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Arctic\")\n", "plt.ylabel(None)\n", - "#plt.ylim(6,0)" + "# plt.ylim(6,0)" ] } ], diff --git a/examples/nblibrary/ocn/ocean_surface.ipynb b/examples/nblibrary/ocn/ocean_surface.ipynb index 4a963205..5ab9314c 100644 --- a/examples/nblibrary/ocn/ocean_surface.ipynb +++ b/examples/nblibrary/ocn/ocean_surface.ipynb @@ -65,7 +65,7 @@ "outputs": [], "source": [ "CESM_output_dir = \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing\"\n", - "serial = False # use dask LocalCluster\n", + "serial = False # use dask LocalCluster\n", "Case = \"b.e23_alpha16b.BLT1850.ne30_t232.054\"\n", "savefigs = False\n", "mom6_tools_config = {}\n", @@ -78,8 +78,8 @@ "metadata": {}, "outputs": [], "source": [ - "OUTDIR = f'{CESM_output_dir}/{Case}/ocn/hist/'\n", - "print('Output directory is:', OUTDIR)" + "OUTDIR = f\"{CESM_output_dir}/{Case}/ocn/hist/\"\n", + "print(\"Output directory is:\", OUTDIR)" ] }, { @@ -95,12 +95,14 @@ "# The following parameters must be set accordingly\n", "######################################################\n", "\n", + "\n", "# create an empty class object\n", "class args:\n", - " pass\n", + " pass\n", + "\n", "\n", - "args.start_date = mom6_tools_config['start_date']\n", - "args.end_date = mom6_tools_config['end_date']\n", + "args.start_date = mom6_tools_config[\"start_date\"]\n", + "args.end_date = mom6_tools_config[\"end_date\"]\n", "args.casename = Case\n", "args.native = f\"{Case}.{mom6_tools_config['Fnames']['native']}\"\n", "args.static = f\"{Case}.{mom6_tools_config['Fnames']['static']}\"\n", @@ -114,15 +116,15 @@ "metadata": {}, "outputs": [], "source": [ - "if not os.path.isdir('PNG/BLD'):\n", - " print('Creating a directory to place figures (PNG/BLD)... \\n')\n", - " os.system('mkdir -p PNG/BLD')\n", - "if not os.path.isdir('PNG/MLD'):\n", - " print('Creating a directory to place figures (PNG/MLD)... \\n')\n", - " os.system('mkdir -p PNG/MLD')\n", - "if not os.path.isdir('ncfiles'):\n", - " print('Creating a directory to place netcdf files (ncfiles)... \\n')\n", - " os.system('mkdir ncfiles') " + "if not os.path.isdir(\"PNG/BLD\"):\n", + " print(\"Creating a directory to place figures (PNG/BLD)... \\n\")\n", + " os.system(\"mkdir -p PNG/BLD\")\n", + "if not os.path.isdir(\"PNG/MLD\"):\n", + " print(\"Creating a directory to place figures (PNG/MLD)... 
\\n\")\n", + " os.system(\"mkdir -p PNG/MLD\")\n", + "if not os.path.isdir(\"ncfiles\"):\n", + " print(\"Creating a directory to place netcdf files (ncfiles)... \\n\")\n", + " os.system(\"mkdir ncfiles\")" ] }, { @@ -132,10 +134,10 @@ "outputs": [], "source": [ "# Spin up cluster (if running in parallel)\n", - "client=None\n", + "client = None\n", "if not serial:\n", - " cluster = LocalCluster(**lc_kwargs)\n", - " client = Client(cluster)\n", + " cluster = LocalCluster(**lc_kwargs)\n", + " client = Client(cluster)\n", "\n", "client" ] @@ -147,8 +149,8 @@ "outputs": [], "source": [ "# load mom6 grid\n", - "grd = MOM6grid(OUTDIR+args.static)\n", - "grd_xr = MOM6grid(OUTDIR+args.static, xrformat=True)" + "grd = MOM6grid(OUTDIR + args.static)\n", + "grd_xr = MOM6grid(OUTDIR + args.static, xrformat=True)" ] }, { @@ -157,21 +159,23 @@ "metadata": {}, "outputs": [], "source": [ - "print('Reading native dataset...')\n", + "print(\"Reading native dataset...\")\n", "startTime = datetime.now()\n", "\n", + "\n", "def preprocess(ds):\n", - " ''' Compute montly averages and return the dataset with variables'''\n", - " variables = ['oml','mlotst','tos','SSH', 'SSU', 'SSV', 'speed', 'time_bnds']\n", - " for v in variables:\n", - " if v not in ds.variables:\n", - " ds[v] = xr.zeros_like(ds.SSH)\n", - " return ds[variables]\n", + " \"\"\"Compute montly averages and return the dataset with variables\"\"\"\n", + " variables = [\"oml\", \"mlotst\", \"tos\", \"SSH\", \"SSU\", \"SSV\", \"speed\", \"time_bnds\"]\n", + " for v in variables:\n", + " if v not in ds.variables:\n", + " ds[v] = xr.zeros_like(ds.SSH)\n", + " return ds[variables]\n", + "\n", "\n", - "ds1 = xr.open_mfdataset(OUTDIR+args.native, parallel=False)\n", + "ds1 = xr.open_mfdataset(OUTDIR + args.native, parallel=False)\n", "ds = preprocess(ds1)\n", "\n", - "print('Time elasped: ', datetime.now() - startTime)" + "print(\"Time elasped: \", datetime.now() - startTime)" ] }, { @@ -180,7 +184,7 @@ "metadata": {}, "outputs": [], "source": [ - "print('Selecting data between {} and {}...'.format(args.start_date, args.end_date))\n", + "print(\"Selecting data between {} and {}...\".format(args.start_date, args.end_date))\n", "ds_sel = ds.sel(time=slice(args.start_date, args.end_date))" ] }, @@ -190,10 +194,10 @@ "metadata": {}, "outputs": [], "source": [ - "catalog = intake.open_catalog(mom6_tools_config['oce_cat'])\n", + "catalog = intake.open_catalog(mom6_tools_config[\"oce_cat\"])\n", "mld_obs = catalog[args.mld_obs].to_dask()\n", "# uncomment to list all datasets available\n", - "#list(catalog)" + "# list(catalog)" ] }, { @@ -211,7 +215,7 @@ "source": [ "%matplotlib inline\n", "# MLD\n", - "get_MLD(ds,'mlotst', mld_obs, grd, args)" + "get_MLD(ds, \"mlotst\", mld_obs, grd, args)" ] }, { @@ -227,7 +231,7 @@ "metadata": {}, "outputs": [], "source": [ - "get_BLD(ds, 'oml', grd, args)" + "get_BLD(ds, \"oml\", grd, args)" ] }, { @@ -237,7 +241,7 @@ "outputs": [], "source": [ "# SSH (not working)\n", - "#get_SSH(ds, 'SSH', grd, args)" + "# get_SSH(ds, 'SSH', grd, args)" ] } ],