Latest Threat Research:SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains.Details
Socket
Book a DemoInstallSign in
Socket

pyrcel

Package Overview
Dependencies
Maintainers
1
Versions
3
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

pyrcel - npm Package Compare versions

Comparing version
1.3.1
to
1.3.2
+102
.circleci/config.yml
# CircleCI 2.0 configuration: build and smoke-test pyrcel against four
# Python versions using conda environments from ci/requirements-py*.yml.
# (Indentation reconstructed — the scraped source had all nesting stripped,
# which is invalid YAML.)
version: 2
jobs:
  build-python311:
    docker:
      - image: continuumio/miniconda3
    resource_class: large
    working_directory: ~/circleci-pyrcel311
    steps:
      - checkout
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v2-dependencies
      - run: echo "Building Python 3.11 version..."
      - run:
          name: Create Conda Environment
          command: |
            conda env create -f ci/requirements-py311.yml
      - run:
          name: Install and Test Pyrcel Simulation
          command: |
            source activate pyrcel
            pip install -e .
            run_parcel examples/simple.yml
  build-python310:
    docker:
      - image: continuumio/miniconda3
    resource_class: large
    working_directory: ~/circleci-pyrcel310
    steps:
      - checkout
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v2-dependencies
      - run: echo "Building Python 3.10 version..."
      - run:
          name: Create Conda Environment
          command: |
            conda env create -f ci/requirements-py310.yml
      - run:
          name: Install and Test Pyrcel Simulation
          command: |
            source activate pyrcel
            pip install -e .
            run_parcel examples/simple.yml
  build-python39: # required for runs that don't use workflows
    docker:
      - image: continuumio/miniconda3
    resource_class: large
    working_directory: ~/circleci-pyrcel39
    steps:
      - checkout
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v2-dependencies
      - run: echo "Building Python 3.9 version..."
      - run:
          name: Create Conda Environment
          command: |
            conda env create -f ci/requirements-py39.yml
      - run:
          name: Install and Test Pyrcel Simulation
          command: |
            source activate pyrcel
            pip install -e .
            run_parcel examples/simple.yml
  build-python38: # required for runs that don't use workflows
    docker:
      - image: continuumio/miniconda3
    resource_class: large
    working_directory: ~/circleci-pyrcel38
    steps:
      - checkout
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v2-dependencies
      - run: echo "Building Python 3.8 version..."
      - run:
          name: Create Conda Environment
          command: |
            conda env create -f ci/requirements-py38.yml
      - run:
          name: Install and Test Pyrcel Simulation
          command: |
            source activate pyrcel
            pip install -e .
            run_parcel examples/simple.yml
workflows:
  version: 2
  build:
    jobs:
      - build-python311
      - build-python310
      - build-python39
      - build-python38
# Ignore patterns for generated artifacts (presumably the repository's
# .gitignore — filename not visible in this view; TODO confirm).
# Compiled files
################
*.pyc
*.so
*.c
*.h
*.exe
*.o
*.mod
build/
dist/
# IPython Notebook files
############################
.ipynb_checkpoints/
# Previously generated files
############################
*.pdf
*.csv
*.nc
*.html
pyrcel.egg-info
# Documentation
############################
docs/_build/
docs/generated/
docs/examples/*.rst
docs/examples/*files/
# Other
#######
*~
*#
parcel_model.egg-info
.idea/*
*.dat
*.DS_Store
# .readthedocs.yaml
# Read the Docs configuration file
# (Indentation reconstructed — the scraped source had all nesting stripped.)

# Required
version: 2

# Set the version of Python and other tools you might need
build:
  os: ubuntu-22.04
  tools:
    python: "3.10"

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# Install the documentation build requirements with pip
python:
  install:
    - requirements: docs/requirements.txt
# Conda environment for the Python 3.10 CI build (ci/requirements-py310.yml).
# (Indentation reconstructed — the scraped source had all nesting stripped.)
name: pyrcel
channels:
  - conda-forge
dependencies:
  - python=3.10
  - gcc_linux-64
  - gxx_linux-64
  - assimulo
  - numba
  - numpy
  - pandas
  - pyyaml
  - scipy
  - xarray
# Conda environment for the Python 3.11 CI build (ci/requirements-py311.yml).
# (Indentation reconstructed — the scraped source had all nesting stripped.)
name: pyrcel
channels:
  - conda-forge
dependencies:
  - python=3.11
  - gcc_linux-64
  - gxx_linux-64
  - assimulo
  - numba
  - numpy
  - pandas
  - pyyaml
  - scipy
  - xarray
# Conda environment for the Python 3.8 CI build (ci/requirements-py38.yml).
# (Indentation reconstructed — the scraped source had all nesting stripped.)
name: pyrcel
channels:
  - conda-forge
dependencies:
  - python=3.8
  - gcc_linux-64
  - gxx_linux-64
  - assimulo
  - numba
  - numpy
  - pandas
  - pyyaml
  - scipy
  - xarray
# Conda environment for the Python 3.9 CI build (ci/requirements-py39.yml).
# (Indentation reconstructed — the scraped source had all nesting stripped.)
name: pyrcel
channels:
  - conda-forge
dependencies:
  - python=3.9
  - gcc_linux-64
  - gxx_linux-64
  - assimulo
  - numba
  - numpy
  - pandas
  - pyyaml
  - scipy
  - xarray
#!/usr/bin/env bash
# Quick script to build the documentation.
#
# Abort on the first failed command: without this, a failed `cd examples`
# would leave the script running `make` in the wrong directory.
set -euo pipefail

# Generate .rst files from the notebooks containing pre-rendered examples
cd examples
make

# Go back and generate the stub automethod templates in the API reference
cd ..
make gen

# Now make the html documents
make html
# -*- coding: utf-8 -*-
#
# pyrcel documentation build configuration file, created by
# sphinx-quickstart on Wed Jul 2 15:49:19 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
#
# (Indentation reconstructed — the scraped source had all structural
# whitespace stripped, which is invalid Python.)

import os
import sys

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath("../"))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.doctest",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.mathjax",
    "numpydoc",
    "IPython.sphinxext.ipython_directive",
    "IPython.sphinxext.ipython_console_highlighting",
]

# Fix toctree bug http://stackoverflow.com/questions/12206334/sphinx-autosummary-toctree-contains-reference-to-nonexisting-document-warnings
# NOTE(review): the original file assigned numpydoc_show_class_members = False
# twice with the same value; the redundant duplicate has been removed.
numpydoc_show_class_members = False
autosummary_generate = True
numpydoc_class_members_toctree = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ["templates"]

# The suffix of source filenames.
source_suffix = ".rst"

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = u"pyrcel"
copyright = u"2019, Daniel Rothenberg"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
from pyrcel import __version__ as version

# The full version, including alpha/beta/rc tags.
release = version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build", "templates"]

# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from
# docs.readthedocs.org
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme

    html_theme = "sphinx_rtd_theme"
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    # 'bootstrap_version': "3",
    # 'bootswatch_theme': "cosmo", #"yeti"
}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), ]
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = "pyrceldoc"

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        "index",
        "pyrcel.tex",
        u"pyrcel documentation",
        u"Daniel Rothenberg",
        "manual",
    )
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ("index", "pyrcel", u"pyrcel Documentation", [u"Daniel Rothenberg"], 1)
]

# If true, show URL addresses after external links.
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        "index",
        "pyrcel",
        u"pyrcel Documentation",
        u"Daniel Rothenberg",
        "pyrcel",
        "Adiabatic cloud parcel model for aerosol activation studies",
        "Miscellaneous",
    )
]

# Documents to append as an appendix to all manuals.
# texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Conda environment for building the documentation (presumably
# docs/requirements or ci/requirements-docs.yml — TODO confirm filename).
# (Indentation reconstructed — the scraped source had all nesting stripped.)
name: pyrcel
channels:
  - conda-forge
dependencies:
  - python==3.7
  - assimulo
  - pandas
  - pyyaml
  - scipy
  - numba
  - numpy
  - xarray
  - numpydoc
  - ipython
  - sphinx
  - sphinx_rtd_theme

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

.. _example_activate:
.. currentmodule:: parcel_model
Example: Activation
===================
In this example, we will study the effect of updraft speed on the
activation of a lognormal ammonium sulfate accumulation mode aerosol.
.. code:: python
# Suppress warnings
import warnings
warnings.simplefilter('ignore')
import pyrcel as pm
import numpy as np
%matplotlib inline
import matplotlib.pyplot as plt
import seaborn as sns
First, we indicate the parcel's initial thermodynamic conditions.
.. code:: python
P0 = 100000. # Pressure, Pa
T0 = 279. # Temperature, K
S0 = -0.1 # Supersaturation, 1-RH
We next define the aerosol distribution to follow the reference
simulation from `Ghan et al,
2011 <http://onlinelibrary.wiley.com/doi/10.1029/2011MS000074/abstract>`__
.. code:: python
aer = pm.AerosolSpecies('ammonium sulfate',
pm.Lognorm(mu=0.05, sigma=2.0, N=1000.),
kappa=0.7, bins=100)
Loop over updraft several velocities in the range 0.1 - 10.0 m/s. We
will perform a detailed parcel model calculation, as well as calculations
with two activation parameterizations. We will also use an accommodation
coefficient of :math:`\alpha_c = 0.1`, following the recommendations of
`Raatikainen et al (2013) <http://www.pnas.org/content/110/10/3760>`__.
First, the parcel model calculations:
.. code:: python
from pyrcel import binned_activation
Vs = np.logspace(-1, np.log10(10.), 11)[::-1] # 0.1 - 10.0 m/s
accom = 0.1
smaxes, act_fracs = [], []
for V in Vs:
# Initialize the model
model = pm.ParcelModel([aer,], V, T0, S0, P0, accom=accom, console=False)
par_out, aer_out = model.run(t_end=2500., dt=1.0, solver='cvode',
output='dataframes', terminate=True)
print(V, par_out.S.max())
# Extract the supersaturation/activation details from the model
# output
S_max = par_out['S'].max()
time_at_Smax = par_out['S'].argmax()
wet_sizes_at_Smax = aer_out['ammonium sulfate'].ix[time_at_Smax].iloc[0]
wet_sizes_at_Smax = np.array(wet_sizes_at_Smax.tolist())
frac_eq, _, _, _ = binned_activation(S_max, T0, wet_sizes_at_Smax, aer)
# Save the output
smaxes.append(S_max)
act_fracs.append(frac_eq)
.. parsed-literal::
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
10.0 0.0156189147154
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
6.3095734448 0.0116683910368
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
3.98107170553 0.00878287310116
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
2.51188643151 0.00664901290831
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
1.58489319246 0.00505644091867
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
1.0 0.00385393398982
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.63095734448 0.00293957320198
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.398107170553 0.00224028774582
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.251188643151 0.00170480101361
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.158489319246 0.0012955732509
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.1 0.000984803827635
Now the activation parameterizations:
.. code:: python
smaxes_arg, act_fracs_arg = [], []
smaxes_mbn, act_fracs_mbn = [], []
for V in Vs:
smax_arg, _, afs_arg = pm.arg2000(V, T0, P0, [aer], accom=accom)
smax_mbn, _, afs_mbn = pm.mbn2014(V, T0, P0, [aer], accom=accom)
smaxes_arg.append(smax_arg)
act_fracs_arg.append(afs_arg[0])
smaxes_mbn.append(smax_mbn)
act_fracs_mbn.append(afs_mbn[0])
Finally, we compile our results into a nice plot for visualization.
.. code:: python
sns.set(context="notebook", style='ticks')
sns.set_palette("husl", 3)
fig, [ax_s, ax_a] = plt.subplots(1, 2, sharex=True, figsize=(10,4))
ax_s.plot(Vs, np.array(smaxes)*100., color='k', lw=2, label="Parcel Model")
ax_s.plot(Vs, np.array(smaxes_mbn)*100., linestyle='None',
marker="o", ms=10, label="MBN2014" )
ax_s.plot(Vs, np.array(smaxes_arg)*100., linestyle='None',
marker="o", ms=10, label="ARG2000" )
ax_s.semilogx()
ax_s.set_ylabel("Supersaturation Max, %")
ax_s.set_ylim(0, 2.)
ax_a.plot(Vs, act_fracs, color='k', lw=2, label="Parcel Model")
ax_a.plot(Vs, act_fracs_mbn, linestyle='None',
marker="o", ms=10, label="MBN2014" )
ax_a.plot(Vs, act_fracs_arg, linestyle='None',
marker="o", ms=10, label="ARG2000" )
ax_a.semilogx()
ax_a.set_ylabel("Activated Fraction")
ax_a.set_ylim(0, 1.)
plt.tight_layout()
sns.despine()
for ax in [ax_s, ax_a]:
ax.legend(loc='upper left')
ax.xaxis.set_ticks([0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0])
ax.xaxis.set_ticklabels([0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0])
ax.set_xlabel("Updraft speed, m/s")
.. image:: activate_files/activate_13_0.png

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

.. _example_basic:
.. currentmodule:: parcel_model
Example: Basic Run
==================
In this example, we will setup a simple parcel model simulation
containing two aerosol modes. We will then run the model with a 1 m/s
updraft, and observe how the aerosol population bifurcates into swelled
aerosol and cloud droplets.
.. code:: python
# Suppress warnings
import warnings
warnings.simplefilter('ignore')
import pyrcel as pm
import numpy as np
%matplotlib inline
import matplotlib.pyplot as plt
.. parsed-literal::
Could not find GLIMDA
First, we indicate the parcel's initial thermodynamic conditions.
.. code:: python
P0 = 77500. # Pressure, Pa
T0 = 274. # Temperature, K
S0 = -0.02 # Supersaturation, 1-RH (98% here)
Next, we define the aerosols present in the parcel. The model itself is
agnostic to how the aerosol are specified; it simply expects lists of
the radii of wetted aerosol radii, their number concentration, and their
hygroscopicity. We can make container objects
(:class:``AerosolSpecies``) that wrap all of this information so that we
never need to worry about it.
Here, let's construct two aerosol modes:
+----------+-----------------------+-----------------+---------+--------------------+
| Mode | :math:`\kappa` | Mean size | Std dev | Number Conc |
| | (hygroscopicity) | (micron) | | (cm\*\*-3) |
+==========+=======================+=================+=========+====================+
| sulfate | 0.54 | 0.015 | 1.6 | 850 |
+----------+-----------------------+-----------------+---------+--------------------+
| sea salt | 1.2 | 0.85 | 1.2 | 10 |
+----------+-----------------------+-----------------+---------+--------------------+
We'll define each mode using the :class:``Lognorm`` distribution
packaged with the model.
.. code:: python
sulfate = pm.AerosolSpecies('sulfate',
pm.Lognorm(mu=0.015, sigma=1.6, N=850.),
kappa=0.54, bins=200)
sea_salt = pm.AerosolSpecies('sea salt',
pm.Lognorm(mu=0.85, sigma=1.2, N=10.),
kappa=1.2, bins=40)
The :class:``AerosolSpecies`` class automatically computes
gridded/binned representations of the size distributions. Let's double
check that the aerosol distribution in the model will make sense by
plotting the number concentration in each bin.
.. code:: python
fig = plt.figure(figsize=(10,5))
ax = fig.add_subplot(111)
ax.grid(False, "minor")
sul_c = "#CC0066"
ax.bar(sulfate.rs[:-1], sulfate.Nis*1e-6, np.diff(sulfate.rs),
color=sul_c, label="sulfate", edgecolor="#CC0066")
sea_c = "#0099FF"
ax.bar(sea_salt.rs[:-1], sea_salt.Nis*1e-6, np.diff(sea_salt.rs),
color=sea_c, label="sea salt", edgecolor="#0099FF")
ax.semilogx()
ax.set_xlabel("Aerosol dry radius, micron")
ax.set_ylabel("Aerosol number conc., cm$^{-3}$")
ax.legend(loc='upper right')
.. parsed-literal::
<matplotlib.legend.Legend at 0x10f4baeb8>
.. image:: basic_run_files/basic_run_9_1.png
Actually running the model is very straightforward, and involves just
two steps:
1. Instantiate the model by creating a :class:``ParcelModel`` object.
2. Call the model's :method:``run`` method.
For convenience this process is encoded into several routines in the
``driver`` file, including both a single-strategy routine and an
iterating routine which adjusts the the timestep and numerical
tolerances if the model crashes. However, we can illustrate the simple
model running process here in case you wish to develop your own scheme
for running the model.
.. code:: python
initial_aerosols = [sulfate, sea_salt]
V = 1.0 # updraft speed, m/s
dt = 1.0 # timestep, seconds
t_end = 250./V # end time, seconds... 250 meter simulation
model = pm.ParcelModel(initial_aerosols, V, T0, S0, P0, console=False, accom=0.3)
parcel_trace, aerosol_traces = model.run(t_end, dt, solver='cvode')
If ``console`` is set to ``True``, then some basic debugging output will
be written to the terminal, including the initial equilibrium droplet
size distribution and some numerical solver diagnostics. The model
output can be customized; by default, we get a DataFrame and a Panel of
the parcel state vector and aerosol bin sizes as a function of time (and
height). We can use this to visualize the simulation results, like in
the package's
`README <https://github.com/darothen/parcel_model/blob/master/README.md>`__.
.. code:: python
fig, [axS, axA] = plt.subplots(1, 2, figsize=(10, 4), sharey=True)
axS.plot(parcel_trace['S']*100., parcel_trace['z'], color='k', lw=2)
axT = axS.twiny()
axT.plot(parcel_trace['T'], parcel_trace['z'], color='r', lw=1.5)
Smax = parcel_trace['S'].max()*100
z_at_smax = parcel_trace['z'].ix[parcel_trace['S'].argmax()]
axS.annotate("max S = %0.2f%%" % Smax,
xy=(Smax, z_at_smax),
xytext=(Smax-0.3, z_at_smax+50.),
arrowprops=dict(arrowstyle="->", color='k',
connectionstyle='angle3,angleA=0,angleB=90'),
zorder=10)
axS.set_xlim(0, 0.7)
axS.set_ylim(0, 250)
axT.set_xticks([270, 271, 272, 273, 274])
axT.xaxis.label.set_color('red')
axT.tick_params(axis='x', colors='red')
axS.set_xlabel("Supersaturation, %")
axT.set_xlabel("Temperature, K")
axS.set_ylabel("Height, m")
sulf_array = aerosol_traces['sulfate'].values
sea_array = aerosol_traces['sea salt'].values
ss = axA.plot(sulf_array[:, ::10]*1e6, parcel_trace['z'], color=sul_c,
label="sulfate")
sa = axA.plot(sea_array*1e6, parcel_trace['z'], color=sea_c, label="sea salt")
axA.semilogx()
axA.set_xlim(1e-2, 10.)
axA.set_xticks([1e-2, 1e-1, 1e0, 1e1], [0.01, 0.1, 1.0, 10.0])
axA.legend([ss[0], sa[0]], ['sulfate', 'sea salt'], loc='upper right')
axA.set_xlabel("Droplet radius, micron")
for ax in [axS, axA, axT]:
ax.grid(False, 'both', 'both')
.. image:: basic_run_files/basic_run_13_0.png
In this simple example, the sulfate aerosol population bifurcated into
interstitial aerosol and cloud droplets, while the entire sea salt
population activated. A peak supersaturation of about 0.63% was reached
a few meters above cloud base, where the ambient relative humidity hit
100%.
How many CDNC does this translate into? We can call upon helper methods
from the ``activation`` package to perform these calculations for us:
.. code:: python
from pyrcel import binned_activation
sulf_trace = aerosol_traces['sulfate']
sea_trace = aerosol_traces['sea salt']
ind_final = int(t_end/dt) - 1
T = parcel_trace['T'].iloc[ind_final]
eq_sulf, kn_sulf, alpha_sulf, phi_sulf = \
binned_activation(Smax/100, T, sulf_trace.iloc[ind_final], sulfate)
eq_sulf *= sulfate.total_N
eq_sea, kn_sea, alpha_sea, phi_sea = \
binned_activation(Smax/100, T, sea_trace.iloc[ind_final], sea_salt)
eq_sea *= sea_salt.total_N
print(" CDNC(sulfate) = {:3.1f}".format(eq_sulf))
print(" CDNC(sea salt) = {:3.1f}".format(eq_sea))
print("------------------------")
print(" total = {:3.1f} / {:3.0f} ~ act frac = {:1.2f}".format(
eq_sulf+eq_sea,
sea_salt.total_N+sulfate.total_N,
(eq_sulf+eq_sea)/(sea_salt.total_N+sulfate.total_N)
))
.. parsed-literal::
CDNC(sulfate) = 146.9
CDNC(sea salt) = 10.0
------------------------
total = 156.9 / 860 ~ act frac = 0.18
# Convert each pre-executed notebook in this directory to reStructuredText.
# (Recipe tabs reconstructed — make requires a hard TAB before each recipe
# line, and the scraped source had them stripped.)
NOTEBOOKS := $(wildcard *.ipynb)
RSTS := $(NOTEBOOKS:.ipynb=.rst)

.PHONY: all clean
all: $(RSTS)

# Pattern rule: one .rst (plus its *_files/ assets) per notebook.
%.rst: %.ipynb
	jupyter-nbconvert $^ --to rst

clean:
	rm -f *.rst
	rm -rf *_files/
	rm -rf _build/
Because the Assimulo dependency is difficult to build on third-party content providers (travis, RTD, etc), I've opted to manually control the examples in this directory. Here are the steps to keep the examples up to date:
1. Run each notebook independently, so that the results (figures, etc) it includes are pre-rendered and self-contained
2. Execute the supplied Makefile to convert ipynb -> rst and generate the rendered figures
3. Commit the newly generated *_files/ folders and all new resources
In the future, this can be simplified by packaging the entire parcel model install. However, that will require either (a) re-opening the ability to use third-party ODE solvers from SciPy, or (b) packaging Assimulo and Sundials into conda for easy access on a third-party server.

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

pyrcel: cloud parcel model
==========================
|DOI|\ |PyPI version|\ |Build Status|\ |Documentation Status|\ |Code Style|
.. |DOI| image:: https://zenodo.org/badge/12927551.svg
:target: https://zenodo.org/badge/latestdoi/12927551
.. |PyPI version| image:: https://badge.fury.io/py/pyrcel.svg
:target: https://badge.fury.io/py/pyrcel
.. |Build Status| image:: https://circleci.com/gh/darothen/pyrcel/tree/master
.svg?style=svg
:target: https://circleci.com/gh/darothen/pyrcel/tree/master
.. |Documentation Status| image:: https://readthedocs.org/projects/pyrcel/badge/?version=stable
:target: http://pyrcel.readthedocs.org/en/stable/?badge=stable
.. |Code Style| image:: https://img.shields.io/badge/code%20style-black-000000
.svg
:target: https://github.com/python/black
This is an implementation of a simple, 0D adiabatic cloud parcel model tool (following `Nenes et al, 2001`_ and `Pruppacher and Klett, 1997`_). It allows flexible descriptions of an initial aerosol population, and simulates the evolution of a proto-cloud droplet population as the parcel ascends adiabatically at either a constant or time/height-dependent updraft speed. Droplet growth within the parcel is tracked on a Lagrangian grid.
.. _Pruppacher and Klett, 1997: http://books.google.com/books?hl=en&lr=&id=1mXN_qZ5sNUC&oi=fnd&pg=PR15&ots=KhdkC6uhB3&sig=PSlNsCeLSB2FvR93Vzo0ptCAnYA#v=onepage&q&f=false
.. _Nenes et al, 2001: http://onlinelibrary.wiley.com/doi/10.1034/j.1600-0889.2001.d01-12.x/abstract
.. image:: figs/model_example.png
You are invited to use the model (in accordance with the `licensing <https://raw
.githubusercontent.com/darothen/pyrcel/master/LICENSE>`_), but please get in
touch with the author via `e-mail <mailto:daniel@danielrothenberg.com>`_ or on
`twitter <https://twitter.com/darothen>`_. Up-to-date versions can be obtained
through the model's `github repository <https://github.com/darothen/pyrcel>`_
or directly from the author. If you use the model for research, please cite
`this journal article <http://journals.ametsoc.org/doi/abs/10.1175/JAS-D-15-0223.1>`_
which details the original model formulation:
| Daniel Rothenberg and Chien Wang, 2016: Metamodeling of Droplet Activation for Global Climate Models. *J. Atmos. Sci.*, **73**, 1255–1272. doi: http://dx.doi.org/10.1175/JAS-D-15-0223.1
|
|
Documentation Outline
---------------------
.. toctree::
:maxdepth: 2
:glob:
sci_descr
install
examples/*
parcel
reference
Current version: |version|
Documentation last compiled: |today|
.. _install:
Installation
============
From conda
----------
Coming soon!
From PyPI
---------
If you already have all the dependencies satisfied, then you can install the
latest release from `PyPI <https://badge.fury.io/py/pyrcel>`_ by using
``pip``:
.. code-block:: bash
$ pip install pyrcel
From source code
----------------
To grab and build the latest bleeding-edge version of the model, you should use
``pip`` and point it to the source code `repository`_ on github:
.. code-block:: bash
$ pip install git+git://github.com/darothen/pyrcel.git
This should automatically build the necessary Cython modules and export the
code package to your normal package installation directory. If you wish to
simply build the code and run it in place, clone the `repository`_, navigate
to it in a terminal, and invoke the build command by hand:
.. code-block:: bash
$ python setup.py build_ext --inplace
This should produce the compiled file `parcel_aux.so` in the model package.
You can also install the code from the cloned source directory by invoking
``pip install`` from within it; this is useful if you're updating or
modifying the model, since you can install an "editable" package which
points directly to the git-monitored code:
.. code-block:: bash
$ cd path/to/pyrcel/
$ pip install -e .
Dependencies
------------
This code was originally written for Python 2.7, and then
`futurized <http://python-future.org/>`_ to Python 3.3+ with hooks for
backwards compatibility. By far, the simplest way to run this code is to grab a
scientific python distribution, such as
`Anaconda <https://store.continuum.io/cshop/anaconda/>`_. This code should work
out-of-the box with almost all dependencies filled (exception being numerical
solvers) on a recent version (1.2+) of this distribution. To facilitate this,
`conda <http://conda.pydata.org/docs/>`_ environments for Python versions 2.7
and 3.5+ are provided in the ``pyrcel/ci`` directory.
Necessary dependencies
^^^^^^^^^^^^^^^^^^^^^^
- `Assimulo <http://www.jmodelica.org/assimulo_home/index.html>`_
- `numba <http://numba.pydata.org>`_
- `numpy <http://www.numpy.org/>`_
- `scipy <http://www.scipy.org/>`_
- `pandas <http://pandas.pydata.org/>`_
.. note::
As of version 1.2.0, the model integration components are being re-written
and only the CVODE interface is exposed. As such, Assimulo is temporarily
a core and required dependency; in the future the other solvers will
be re-enabled. You should first try to install Assimulo via conda
.. code-block:: bash
$ conda install -c conda-forge assimulo
since this will automatically take care of obtaining necessary compiled
dependencies like sundials. However, for best results you may want to
`manually install Assimulo <http://www.jmodelica.org/assimulo_home/installation.html>`_,
since the conda-forge recipe may default to a sundials/OpenBLAS combination
which could degrade the performance of the model.
Numerical solver dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- **LSODA** - `scipy <http://www.scipy.org/>`_ or
`odespy <https://github.com/hplgit/odespy/>`_
- **VODE**, **LSODE** - `odespy <https://github.com/hplgit/odespy/>`_
- **CVODE** - `Assimulo <http://www.jmodelica.org/assimulo_home/index.html>`_
Recommended additional packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- `matplotlib <http://matplotlib.sourceforge.net/>`_
- `seaborn <http://stanford.edu/~mwaskom/software/seaborn/index.html>`_
- `PyYAML <http://pyyaml.org/wiki/PyYAMLDocumentation>`_
- `xarray <http://xarray.pydata.org/en/stable/>`_
Testing
-------
A nose test-suite is under construction. To check that your model is configured
and running correctly, you can copy and run the notebook corresponding to the
:ref:`basic run example <example_basic>`, or run the command-line interface
version of the model with the pre-packed simple run case:
.. code-block:: bash
$ cd path/to/pyrcel/
$ ./run_parcel examples/simple.yml
Bugs / Suggestions
------------------
The code has an
`issue tracker on github <https://github.com/darothen/pyrcel/issues>`_
and I strongly encourage you to note any problems with the model there, such
as typos or weird behavior and results. Furthermore, I'm looking for ways to
expand and extend the model, so if there is something you might wish to see
added, please note it there or `send me an e-mail <mailto:daniel@danielrothenberg.com>`_.
The code was written in such a way that it should be trivial to add physics in a modular fashion.
.. _repository: http://github.com/darothen/pyrcel
# Makefile for Sphinx documentation
#
# (Recipe tabs reconstructed — make requires a hard TAB before each recipe
# line, and the scraped source had them stripped.)

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html notebooks gen dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	rm -rf $(BUILDDIR)/* generated/
	rm -rf examples/*_files/
	rm -rf examples/*.rst

notebooks:
	make -C examples notebooks

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

gen:
	sphinx-autogen -t templates -o generated *.rst
	@echo
	@echo "auto-generated content finished building in $(BUILDDIR)/generated"

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyrcel.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyrcel.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/pyrcel"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyrcel"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
.. _parcel:
.. currentmodule:: pyrcel
Parcel Model Details
====================
Below is the documentation for the parcel model, which is useful for debugging
and development. For a higher-level overview, see the :ref:`scientific description
<sci_descr>`.
Implementation
--------------
.. autoclass:: ParcelModel
:members: set_initial_conditions, run
Derivative Equation
-------------------
.. automethod:: parcel.parcel_ode_sys
.. _reference:
.. currentmodule:: pyrcel
Reference
=========
Main Parcel Model
-----------------
The core of the model has its own documentation page, which you can access :ref:`here <parcel>`.
.. autosummary::
:toctree: generated/
ParcelModel
Driver Tools
------------
.. automodule:: pyrcel.driver
.. autosummary::
:toctree: generated/
run_model
iterate_runs
Thermodynamics/Kohler Theory
----------------------------
.. automodule:: pyrcel.thermo
.. autosummary::
:toctree: generated/
dv
ka
rho_air
es
sigma_w
Seq
Seq_approx
kohler_crit
critical_curve
Aerosols
--------
.. automodule:: pyrcel.aerosol
.. autosummary::
:toctree: generated/
:template: class.rst
AerosolSpecies
The following are utility functions which might be useful in studying
and manipulating aerosol distributions for use in the :ref:`model <parcel>`
or activation routines.
.. autosummary::
:toctree: generated/
dist_to_conc
Distributions
-------------
.. automodule:: pyrcel.distributions
.. autosummary::
:toctree: generated/
:template: class.rst
BaseDistribution
Gamma
Lognorm
MultiModeLognorm
The following dictionaries containing (multi) Lognormal aerosol size distributions have also been saved for convenience:
1. ``FN2005_single_modes``: Fountoukis, C., and A. Nenes (2005), Continued development of a cloud droplet formation parameterization for global climate models, J. Geophys. Res., 110, D11212, doi:10.1029/2004JD005591
2. ``NS2003_single_modes``: Nenes, A., and J. H. Seinfeld (2003), Parameterization of cloud droplet formation in global climate models, J. Geophys. Res., 108, 4415, doi:10.1029/2002JD002911, D14.
3. ``whitby_distributions``: Whitby, K. T. (1978), The physical characteristics of sulfur aerosols, Atmos. Environ., 12(1-3), 135–159, doi:10.1016/0004-6981(78)90196-8.
4. ``jaenicke_distributions``: Jaenicke, R. (1993), Tropospheric Aerosols, in *Aerosol-Cloud-Climate Interactions*, P. V. Hobbs, ed., Academic Press, San Diego, CA, pp. 1-31.
Activation
----------
.. automodule:: pyrcel.activation
.. autosummary::
:toctree: generated/
lognormal_activation
binned_activation
multi_mode_activation
arg2000
mbn2014
shipwayabel2010
ming2006
.. _constants:
Constants
---------
.. automodule:: pyrcel.constants
ipython
numba<0.57
numpy<1.25
numpydoc
pandas
pyyaml
scipy<1.11
setuptools
sphinx
sphinx_rtd_theme
xarray
.. _sci_descr:
Scientific Description
======================
The simplest tools available for describing the growth and evolution of
a cloud droplet spectrum from a given population of aerosols are based
on zero-dimensional, adiabatic cloud parcel models. By employing a
detailed description of the condensation of ambient water vapor onto the
growing droplets, these models can accurately describe the activation of
a subset of the aerosol population by predicting how the presence of the
aerosols in the updraft modify the maximum supersaturation achieved as
the parcel rises. Furthermore, these models serve as the theoretical
basis or reference for parameterizations of droplet activation which are
included in modern general circulation models ([Ghan2011]_) .
The complexity of these models varies with the range of physical processes
one wishes to study. At the most complex end of the spectrum, one might wish
to accurately resolve chemical transfer between the gas and aqueous phase in
addition to physical transformations such as collision/coalescence. One could
also add ice-phase processes to such a model.
Model Formulation
-----------------
The adiabatic cloud parcel model implemented here is based on
models described in the literature ([Nenes2001]_, [SP2006]_) with some modifications and improvements. For a full description of the parcel model, please see [Rothenberg2016]_.
The conservation of heat in a parcel of air rising at constant
velocity :math:`V` without entrainment can be written as
.. math:: \frac{dT}{dt} = -\frac{gV}{c_p} - \frac{L}{c_p}\frac{d w_v}{dt}
:label: dTdt
where :math:`T` is the parcel air temperature. Assuming adiabaticity
and neglecting entrainment is suitable for studying cloud droplet
formation near the cloud base, where the majority of droplet activation
occurs. Because the mass of water must be conserved as it changes from
the vapor to liquid phase, the relationship
.. math:: \frac{d w_v}{dt} = - \frac{dw_c}{dt}
:label: dw_vdt
must hold, where :math:`w_v` and :math:`w_c` are the mass mixing ratios
of water vapor and liquid water (condensed in droplets) in the parcel.
The rate of change of water in the liquid phase in the parcel is
governed solely by condensation onto the existing droplet population.
For a population of :math:`N_i` droplets of radius :math:`r_i`, where
:math:`i=1,\dots,n`, the total condensation rate is given by
.. math:: \frac{dw_c}{dt} = \frac{4\pi \rho_w}{\rho_a}\sum\limits_{i=1}^nN_ir_i^2\frac{dr_i}{dt}
:label: dw_cdt
Here, the particle growth rate, :math:`\frac{dr_i}{dt}` is calculated as
.. math:: \frac{dr_i}{dt} = \frac{G}{r_i}(S-S_{eq})
:label: dr_idt
where :math:`G` is a growth coefficient which is a function of the
physical and chemical properties of the particle receiving condensate,
given by
.. math:: G = \left(\frac{\rho_w R T}{e_s D'_v M_w} + \frac{L\rho_w[(LM_w/RT) - 1]}{k'_a T}\right)^{-1}
:label: G
Droplet growth via condensation is modulated by the difference between
the environmental supersaturation, :math:`S`, and the droplet
equilibrium supersaturation, :math:`S_{eq}`, predicted from Kohler
theory. To account for differences in aerosol chemical properties which
could affect the ability for particles to uptake water, the
:math:`\kappa`-Köhler theory parameterization ([PK2007]_) is employed in the
model. :math:`\kappa`-Kohler theory utilizes a single parameter to
describe aerosol hygroscopicity, and is widely employed in modeling of
aerosol processes. The hygroscopicity parameter :math:`\kappa` is
related to the water activity of an aqueous aerosol solution by
.. math:: \frac{1}{a_w} = 1 + \kappa\frac{V_s}{V_w}
where :math:`V_s` and :math:`V_w` are the volumes of dry particulate
matter and water in the aerosol solution. With this parameter, the full
:math:`\kappa`-Kohler theory may be expressed as
.. math:: S_{eq} = \frac{r_i^3 - r_{d,i}^3}{r_i^3 - r_{d,i}^3(1-\kappa_i)}\exp\left( \frac{2M_w\sigma_w}{RT\rho_w r_i} \right) - 1
:label: S_eq
where :math:`r_d` and :math:`r` are the dry aerosol particle size and
the total radius of the wetted aerosol. The surface tension of water,
:math:`\sigma_w`, is dependent on the temperature of the parcel such
that :math:`\sigma_w = 0.0761 - 1.55\times 10^{-4}(T-273.15)`
J/m\ :math:`^2` . Both the diffusivity and thermal conductivity of air
have been modified in the growth coefficient equation to account for
non-continuum effects as droplets grow, and are given by the expressions
.. math:: D'_v = D_v\bigg/\left(1 + \frac{D_v}{a_c r}\sqrt{\frac{2\pi M_w}{RT}}\right)
and
.. math:: k'_a = k_a\bigg/\left(1 + \frac{k_a}{a_T r \rho_a c_p}\sqrt{\frac{2\pi M_a}{RT}} \right)
In these expressions, the thermal accommodation coefficient,
:math:`a_T`, is assumed to be :math:`0.96` and the condensation
coefficient, :math:`a_c` is taken as unity (see :ref:`Constants <constants>`).
Under the adiabatic assumption, the evolution of the parcel’s
supersaturation is governed by the balance between condensational
heating as water vapor condenses onto droplets and cooling induced by
the parcel’s vertical motion,
.. math:: \frac{dS}{dt} = \alpha V - \gamma\frac{dw_c}{dt}
:label: dSdt
where :math:`\alpha` and :math:`\gamma` are functions which are weakly
dependent on temperature and pressure :
.. math:: \alpha = \frac{gM_wL}{c_pRT^2} - \frac{gM_a}{RT}
.. math:: \gamma = \frac{PM_a}{e_sM_w} + \frac{M_wL^2}{c_pRT^2}
The parcel’s pressure is predicted using the hydrostatic relationship,
accounting for moisture by using virtual temperature (which can always
be diagnosed as the model tracks the specific humidity through the mass
mixing ratio of water vapor),
.. math:: \frac{dP}{dt} = \frac{-g P V}{R_d T_v}
:label: dPdt
The equations :eq:`dPdt`, :eq:`dSdt`, :eq:`dw_cdt`, :eq:`dw_vdt`,
and :eq:`dTdt` provide a simple, closed system of ordinary
differential equations which can be numerically integrated forward in
time. Furthermore, this model formulation allows the simulation of an
arbitrary configuration of initial aerosols, in terms of size, number
concentration, and hygroscopicity. Adding additional aerosol size bins
is simply accomplished by tracking one additional size bin in the system
of ODE’s. The important application of this feature is that the model
can be configured to simulate both internal or external mixtures of
aerosols, or some combination thereof.
Model Implementation and Procedure
----------------------------------
The parcel model described in the previous section was implemented using
a modern modular and object-oriented software engineering framework.
This framework allows the model to be simply configured with myriad
initial conditions and aerosol populations. It also enables model
components - such as the numerical solver or condensation
parameterization - to be swapped and replaced. Most importantly, the use
of object-oriented techniques allows the model to be incorporated into
frameworks which grossly accelerate the speed at which the model can be
evaluated. For instance, although models like the one developed here are
relatively cheap to execute, large ensembles of model runs have been
limited in scope to several hundred or a thousand runs. However, the
framework of this particular parcel model implementation was designed
such that it could be run as a black box as part of a massively-parallel
ensemble driver.
To run the model, a set of initial conditions needs to be specified,
which includes the updraft speed, the parcel’s initial temperature,
pressure, and supersaturation, and the aerosol population. Given these
parameters, the model calculates an initial equilibrium droplet spectrum
by computing the equilibrium wet radii of each aerosol. This is calculated
numerically from the Kohler equation for each aerosol/proto-droplet, or
by employing the typical Kohler theory approximation
.. math:: S \approx \frac{A}{r} - \kappa\frac{r_d^3}{r^3}
These wet radii are used as the initial droplet radii in the simulation.
Once the initial conditions have been configured, the model is
integrated forward in time with a numerical solver (see :func:`ParcelModel.run`
for more details). The available solvers wrapped here are:
- LSODA(R)
- LSODE
- (C)VODE
During the model integration, the size representing each aerosol bin is
allowed to grow via condensation, producing something akin to a moving
grid. In the future, a fixed Eulerian
grid will likely be implemented in the model for comparison.
Aerosol Population Specification
--------------------------------
The model may be supplied with any arbitrary population of aerosols,
providing the population can be approximated with a sectional
representation. Most commonly, aerosol size distributions are
represented with a continuous lognormal distribution,
.. math:: n_N(r) = \frac{dN}{d \ln r} = \frac{N_t}{\sqrt{2\pi}\ln \sigma_g}\exp\left(-\frac{ \ln^2(r/\mu_g)}{2\ln^2\sigma_g}\right)
:label: lognormal
which can be summarized with the set of three parameters,
:math:`(N_t, \mu_g, \sigma_g)` and correspond, respectively, to the
total aerosol number concentration, the geometric mean or number mode
radius, and the geometric standard deviation. Complicated multi-modal
aerosol distributions can often be represented as the sum of several
lognormal distributions. Since the parcel model describes the evolution
of a discrete aerosol size spectrum, the distribution can be broken into :math:`n` bins,
and the continuous aerosol size distribution approximated by taking the
number concentration and size at the geometric mean value in each bin,
such that the discrete approximation to the aerosol size distribution
becomes
.. math:: n_{N,i}(r_i) = \sum\limits_{i=1}^n\frac{N_i}{\sqrt{2\pi}\ln\sigma_g}\exp\left(-\frac{\ln^2(r_i/\mu_g)}{2\ln^2\sigma_g}\right)
If no bounds on the size range of :math:`r_i` is specified, then the
model pre-computes :math:`n` equally-spaced bins over the logarithm of
:math:`r`, and covers the size range :math:`\mu_g/10\sigma_g` to
:math:`10\sigma_g\mu_g`. It is typical to run the model with :math:`200`
size bins per aerosol mode. Neither this model nor similar ones exhibit
much sensitivity towards the density of the sectional discretization .
Typically, a single value for hygroscopicity, :math:`\kappa` is
prescribed for each aerosol mode. However, the model tracks a
hygroscopicity parameter for each individual size bin, so size-dependent
aerosol composition can be incorporated into the aerosol population.
This representation of the aerosol population is similar to the external
mixing state assumption. An advantage to using this representation is
that complex mixing states can be represented by adding various size
bins, each with their own number concentration and hygroscopicity.
.. topic:: Reference
References
----------
.. [Nenes2001] Nenes, A., Ghan, S., Abdul-Razzak, H., Chuang, P. Y. & Seinfeld, J. H. Kinetic limitations on cloud droplet formation and impact on cloud albedo. Tellus 53, 133–149 (2001).
.. [SP2006] Seinfeld, J. H. & Pandis, S. N. Atmospheric Chemistry and Physics: From Air Pollution to Climate Change. Atmos. Chem. Phys. 2nd, 1203 (Wiley, 2006).
.. [Rothenberg2016] Daniel Rothenberg and Chien Wang, 2016: Metamodeling of Droplet Activation for Global Climate Models. *J. Atmos. Sci.*, **73**, 1255–1272. doi: http://dx.doi.org/10.1175/JAS-D-15-0223.1
.. [PK2007] Petters, M. D. & Kreidenweis, S. M. A single parameter representation of hygroscopic growth and cloud condensation nucleus activity. Atmos. Chem. Phys. 7, 1961–1971 (2007).
.. [Ghan2011] Ghan, S. J. et al. Droplet nucleation: Physically-based parameterizations and comparative evaluation. J. Adv. Model. Earth Syst. 3, M10001 (2011).
:mod:`{{module}}`.{{objname}}
{{ underline }}==============
.. currentmodule:: {{ module }}
.. autoclass:: {{ objname }}
:members:
:undoc-members:
{% block methods %}
.. automethod:: __init__
{% endblock %}
# Simple, example configuration script for a parcel model simulation
# Save the simulation output in a folder "output/" with the name "simple"
experiment_control:
name: "simple"
output_dir: "output/"
# Here, we set the model to run in 1 second chunks and save output interpolated to every
# 0.5 seconds. It will end after 9999 simulation seconds, unless termination criteria
# (a supersaturation max) is reached, after which it will stop once the simulated parcel has
# ascended 25 more meters.
model_control:
output_dt: 0.5
solver_dt: 1.0
t_end: 9999.0
terminate: true
terminate_depth: 25.0
# Initialize the model with a single aerosol species:
initial_aerosol:
# 1) a sulfate mode, with 250 bins, and
- name: sulfate
distribution: lognormal
distribution_args: { mu: 0.15, N: 1000, sigma: 1.2 }
kappa: 0.54
bins: 250
# Set the model initial conditions
initial_conditions:
temperature: 283.15 # K
relative_humidity: 0.95 # %, as a fraction
pressure: 85000.0 # Pa
updraft_speed: 0.44 # m/s
# Simple, example configuration script for a parcel model simulation
# Save the simulation output in a folder "output/" with the name "simple"
experiment_control:
name: "simple_supersat"
output_dir: "output/"
# Here, we set the model to run in 10 second chunks and save output interpolated to every
# 1 second. It will end after 9999 simulation seconds, unless termination criteria
# (a supersaturation max) is reached, after which it will stop once the simulated parcel has
# ascended 10 more meters.
model_control:
output_dt: 1.0
solver_dt: 10.0
t_end: 9999.0
terminate: true
terminate_depth: 10.0
# Initialize the model with two aerosol species:
initial_aerosol:
# 1) a sulfate mode, with 250 bins, and
- name: sulfate
distribution: lognormal
distribution_args: { mu: 0.15, N: 1000, sigma: 1.2 }
kappa: 0.54
bins: 250
# 2) a mixed mode of small particles, with 50 bins
- name: mos
distribution: lognormal
distribution_args: { mu: 0.02, N: 50000, sigma: 1.05 }
kappa: 0.12
bins: 50
# Set the model initial conditions
initial_conditions:
temperature: 283.15 # K
relative_humidity: 1.005 # %, as a fraction
pressure: 85000.0 # Pa
updraft_speed: 0.44 # m/s
# Simple, example configuration script for a parcel model simulation
# Save the simulation output in a folder "output/" with the name "simple"
experiment_control:
name: "simple"
output_dir: "output/"
# Here, we set the model to run in 10 second chunks and save output interpolated to every
# 1 second. It will end after 9999 simulation seconds, unless termination criteria
# (a supersaturation max) is reached, after which it will stop once the simulated parcel has
# ascended 10 more meters.
model_control:
output_dt: 1.0
solver_dt: 10.0
t_end: 9999.0
terminate: true
terminate_depth: 10.0
# Initialize the model with two aerosol species:
initial_aerosol:
# 1) a sulfate mode, with 250 bins, and
- name: sulfate
distribution: lognormal
distribution_args: { mu: 0.15, N: 1000, sigma: 1.2 }
kappa: 0.54
bins: 250
# 2) a mixed mode of small particles, with 50 bins
- name: mos
distribution: lognormal
distribution_args: { mu: 0.02, N: 50000, sigma: 1.05 }
kappa: 0.12
bins: 50
# Set the model initial conditions
initial_conditions:
temperature: 283.15 # K
relative_humidity: 0.95 # %, as a fraction
pressure: 85000.0 # Pa
updraft_speed: 0.44 # m/s
[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"
[project]
description = "pyrcel: 0D adiabatic cloud parcel model"
name = "pyrcel"
authors = [
{ name = "Daniel Rothenberg", email = "daniel@danielrothenberg.com" },
]
readme = "README.md"
requires-python = ">=3.8"
license = { file = "LICENSE.md" }
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Topic :: Scientific/Engineering :: Atmospheric Science",
]
dependencies = [
"numba",
"numpy",
"pandas",
"pyyaml",
"scipy",
"setuptools",
"setuptools-scm",
"xarray",
]
dynamic = ["version"]
[project.urls]
Documentation = "https://pyrcel.readthedocs.io/en/latest/"
Repository = "https://github.com/darothen/pyrcel"
[tool.setuptools]
packages = ["pyrcel"]
# NOTE(review): this table was previously misspelled "[tools.setuptools]" (and
# therefore silently ignored), alongside a [tool.setuptools.packages] table with
# find = {namespaces = false}. Declaring both an explicit `packages` list and
# automatic discovery conflicts, so the explicit list is kept.
[project.scripts]
run_parcel = "pyrcel.scripts.run_parcel:run_parcel"
[tool.setuptools_scm]
version_file = "pyrcel/version.py"
# NOTE(review): the legacy "[console_scripts]" table below was invalid TOML
# (its value was unquoted) and duplicated the [project.scripts] entry above;
# it is preserved here only as a comment.
# [console_scripts]
# run_parcel = "pyrcel.scripts.run_parcel:run_parcel"
import numba as nb
import numpy as np
from numba.pycc import CC
import pyrcel.constants as c
## Define double DTYPE
# Canonical floating-point type for state arrays handled by this module.
DTYPE = np.float64
# High-precision literal for pi, usable inside nopython-compiled kernels.
PI = 3.14159265358979323846264338328
# Number of non-aerosol state variables (the seven thermodynamic entries
# z, P, T, wv, wc, wi, S unpacked in parcel_ode_sys) -- taken from the shared
# constants module so this kernel and the rest of the package agree.
N_STATE_VARS = c.N_STATE_VARS
# AOT/numba stuff
# Ahead-of-time compilation module: each @auxcc.export below registers a typed
# entry point in the compiled "parcel_aux_numba" extension.
auxcc = CC("parcel_aux_numba")
auxcc.verbose = True
## Auxiliary, single-value calculations with GIL released for derivative
## calculations
@nb.njit()
@auxcc.export("sigma_w", "f8(f8)")
def sigma_w(T):
    """See :func:`pyrcel.thermo.sigma_w` for full documentation.

    Empirical linear fit for the surface tension of water (J/m^2) at
    temperature ``T`` (K).
    """
    # The fit is expressed in degrees Celsius.
    T_celsius = T - 273.15
    return 0.0761 - 1.55e-4 * T_celsius
@nb.njit()
@auxcc.export("ka", "f8(f8, f8, f8)")
def ka(T, r, rho):
    """See :func:`pyrcel.thermo.ka` for full documentation.

    Thermal conductivity of air near a droplet of radius ``r`` (m) at
    temperature ``T`` (K) and air density ``rho``, including a
    non-continuum correction for small droplets.
    """
    # Continuum-regime conductivity (empirical linear fit in T)
    k_continuum = 1e-3 * (4.39 + 0.071 * T)
    # Correction factor grows as r shrinks toward the molecular scale
    kinetic_term = np.sqrt((2 * PI * c.Ma) / (c.R * T))
    correction = 1.0 + (k_continuum / (c.at * r * rho * c.Cp)) * kinetic_term
    return k_continuum / correction
@nb.njit()
@auxcc.export("dv", "f8(f8, f8, f8, f8)")
def dv(T, r, P, accom):
    """See :func:`pyrcel.thermo.dv` for full documentation.

    Water-vapor diffusivity near a droplet of radius ``r`` (m) at
    temperature ``T`` (K) and pressure ``P`` (Pa), including a
    non-continuum correction controlled by the condensation
    (accommodation) coefficient ``accom``.
    """
    # Empirical continuum diffusivity takes pressure in atmospheres.
    # NOTE(review): the factor 1.01325e-5 is not 1/101325 (~9.869e-6); confirm
    # the intended Pa -> atm conversion against pyrcel.thermo.dv.
    P_atm = P * 1.01325e-5  # Pa -> atm
    d_continuum = 1e-4 * (0.211 / P_atm) * ((T / 273.0) ** 1.94)
    # Correction factor for droplets approaching the molecular scale
    kinetic_term = np.sqrt((2 * PI * c.Mw) / (c.R * T))
    correction = 1.0 + (d_continuum / (accom * r)) * kinetic_term
    return d_continuum / correction
@nb.njit()
@auxcc.export("es", "f8(f8)")
def es(T):
    """See :func:`pyrcel.thermo.es` for full documentation

    Saturation vapor pressure (Pa). Note: ``T`` is expected in degrees
    Celsius here — the caller in ``parcel_ode_sys`` passes ``T - 273.15``.
    """
    return 611.2 * np.exp(17.67 * T / (T + 243.5))
@nb.njit()
# BUGFIX: the export signature previously declared only three doubles,
# "f8(f8, f8, f8)", but Seq takes four arguments (r, r_dry, T, kappa); the
# AOT signature must match the Python signature, as it does for every other
# export in this module.
@auxcc.export("Seq", "f8(f8, f8, f8, f8)")
def Seq(r, r_dry, T, kappa):
    """See :func:`pyrcel.thermo.Seq` for full documentation.

    Equilibrium supersaturation over a droplet of wet radius ``r`` (m) with
    dry radius ``r_dry`` (m) and hygroscopicity ``kappa`` at temperature
    ``T`` (K), following kappa-Kohler theory.
    """
    # Curvature (Kelvin) term
    A = (2.0 * c.Mw * sigma_w(T)) / (c.R * T * c.rho_w * r)
    # Solute (Raoult) term; B = 1 leaves only the curvature effect when the
    # particle is treated as insoluble (kappa <= 0)
    B = 1.0
    if kappa > 0.0:
        B = (r**3 - (r_dry**3)) / (r**3 - (r_dry**3) * (1.0 - kappa))
    return np.exp(A) * B - 1.0
## RHS Derivative callback function
@nb.njit(parallel=True)
@auxcc.export("parcel_ode_sys", "f8[:](f8[:], f8, i4, f8[:], f8[:], f8, f8[:], f8)")
def parcel_ode_sys(y, t, nr, r_drys, Nis, V, kappas, accom):
    """Calculates the instantaneous time-derivative of the parcel model system.

    Given a current state vector `y` of the parcel model, computes the tendency
    of each term including thermodynamic (pressure, temperature, etc) and aerosol
    terms. The basic aerosol properties used in the model must be passed along
    with the state vector (i.e. if being used as the callback function in an ODE
    solver).

    Parameters
    ----------
    y : array_like
        Current state of the parcel model system,
            * y[0] = altitude, m
            * y[1] = Pressure, Pa
            * y[2] = temperature, K
            * y[3] = water vapor mass mixing ratio, kg/kg
            * y[4] = cloud liquid water mass mixing ratio, kg/kg
            * y[5] = cloud ice water mass mixing ratio, kg/kg
            * y[6] = parcel supersaturation
            * y[7:] = aerosol bin sizes (radii), m
    t : float
        Current simulation time, in seconds.
    nr : Integer
        Number of aerosol radii being tracked.
    r_drys : array_like
        Array recording original aerosol dry radii, m.
    Nis : array_like
        Array recording aerosol number concentrations, 1/(m**3).
    V : float
        Updraft velocity, m/s.
    kappas : array_like
        Array recording aerosol hygroscopicities.
    accom : float, optional (default=:const:`constants.ac`)
        Condensation coefficient.

    Returns
    -------
    x : array_like
        Array of shape (``nr``+7, ) containing the evaluated parcel model
        instantaneous derivative.

    Notes
    -----
    This function is implemented using numba; it does not need to be just-in-
    time compiled in order to function correctly, but it is set up ahead of time
    so that the internal loop over each bin growth term is parallelized.
    """
    # Unpack the state vector (layout documented above)
    z = y[0]
    P = y[1]
    T = y[2]
    wv = y[3]
    wc = y[4]
    wi = y[5]
    S = y[6]
    rs = y[N_STATE_VARS:]  # droplet radii occupy the tail of the state vector
    T_c = T - 273.15  # convert temperature to Celsius
    pv_sat = es(T_c)  # saturation vapor pressure
    wv_sat = wv / (S + 1.0)  # saturation mixing ratio
    # Virtual temperature approximation; Tv > T when vapor is present
    Tv = (1.0 + 0.61 * wv) * T
    e = (1.0 + S) * pv_sat  # water vapor pressure
    ## Compute air densities from current state
    rho_air = P / c.Rd / Tv
    #: TODO - port to parcel.py
    rho_air_dry = (P - e) / c.Rd / T
    ## Begin computing tendencies
    # Hydrostatic pressure tendency for a parcel rising at speed V
    dP_dt = -1.0 * rho_air * c.g * V
    dwc_dt = 0.0
    # drs_dt = np.empty(shape=(nr), dtype=DTYPE)
    drs_dt = np.empty_like(rs)
    # Parallel loop over size bins; the dwc_dt accumulation presumably relies
    # on numba's prange reduction handling -- confirm before restructuring.
    for i in nb.prange(nr):
        r = rs[i]
        r_dry = r_drys[i]
        kappa = kappas[i]
        Ni = Nis[i]
        ## Non-continuum diffusivity/thermal conductivity of air near
        ## near particle
        dv_r = dv(T, r, P, accom)
        ka_r = ka(T, r, rho_air)
        ## Condensation coefficient
        G_a = (c.rho_w * c.R * T) / (pv_sat * dv_r * c.Mw)
        G_b = (c.L * c.rho_w * ((c.L * c.Mw / (c.R * T)) - 1.0)) / (ka_r * T)
        G = 1.0 / (G_a + G_b)
        ## Difference between ambient and particle equilibrium supersaturation
        Seq_r = Seq(r, r_dry, T, kappa)
        delta_S = S - Seq_r
        ## Size and liquid water tendencies
        dr_dt = (G / r) * delta_S
        dwc_dt += (
            Ni * r * r * dr_dt
        )  # Contribution to liq. water tendency due to growth
        drs_dt[i] = dr_dt
    dwc_dt *= 4.0 * PI * c.rho_w / rho_air_dry  # Hydrated aerosol size -> water mass
    # use rho_air_dry for mixing ratio definition consistency
    # No freezing implemented yet
    dwi_dt = 0.0
    ## MASS BALANCE CONSTRAINT
    # Vapor lost exactly balances condensate (and, eventually, ice) gained
    dwv_dt = -1.0 * (dwc_dt + dwi_dt)
    ## ADIABATIC COOLING
    # Dry-adiabatic cooling plus latent heating from condensation
    dT_dt = -c.g * V / c.Cp - c.L * dwv_dt / c.Cp
    dz_dt = V
    """ Alternative methods for calculation supersaturation tendency
    # Used eq 12.28 from Pruppacher and Klett in stead of (9) from Nenes et al, 2001
    #cdef double S_a, S_b, S_c, dS_dt
    #cdef double S_b_old, S_c_old, dS_dt_old
    #S_a = (S+1.0)
    ## NENES (2001)
    #S_b_old = dT_dt*wv_sat*(17.67*243.5)/((243.5+(Tv-273.15))**2.)
    #S_c_old = (rho_air*g*V)*(wv_sat/P)*((0.622*L)/(Cp*Tv) - 1.0)
    #dS_dt_old = (1./wv_sat)*(dwv_dt - S_a*(S_b_old-S_c_old))
    ## PRUPPACHER (PK 1997)
    #S_b = dT_dt*0.622*L/(Rd*T**2.)
    #S_c = g*V/(Rd*T)
    #dS_dt = P*dwv_dt/(0.622*es(T-273.15)) - S_a*(S_b + S_c)
    ## SEINFELD (SP 1998)
    #S_b = L*Mw*dT_dt/(R*T**2.)
    #S_c = V*g*Ma/(R*T)
    #dS_dt = dwv_dt*(Ma*P)/(Mw*es(T-273.15)) - S_a*(S_b + S_c)
    """
    ## GHAN (2011)
    # Supersaturation tendency: alpha*V (cooling source) minus
    # gamma*dwc_dt (condensational sink)
    alpha = (c.g * c.Mw * c.L) / (c.Cp * c.R * (T**2))
    alpha -= (c.g * c.Ma) / (c.R * T)
    gamma = (P * c.Ma) / (c.Mw * pv_sat)
    gamma += (c.Mw * c.L * c.L) / (c.Cp * c.R * T * T)
    dS_dt = alpha * V - gamma * dwc_dt
    # Assemble the derivative vector in the same layout as `y`
    # x = np.empty(shape=(nr+N_STATE_VARS), dtype='d')
    x = np.empty_like(y)
    x[0] = dz_dt
    x[1] = dP_dt
    x[2] = dT_dt
    x[3] = dwv_dt
    x[4] = dwc_dt
    x[5] = dwi_dt
    x[6] = dS_dt
    x[N_STATE_VARS:] = drs_dt[:]
    return x
#!/usr/bin/env python
"""
CLI interface to run parcel model simulation.
"""
import os
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import yaml
import pyrcel as pm
import pyrcel.util
# Command-line interface: a single required positional argument pointing at the
# YAML namelist that configures the simulation.
parser = ArgumentParser(
    description=__doc__, formatter_class=RawDescriptionHelpFormatter
)
parser.add_argument(
    "namelist",
    type=str,
    metavar="config.yml",
    help="YAML namelist controlling simulation configuration",
)
# Maps the `distribution` key from the namelist onto the corresponding pyrcel
# size-distribution class; only lognormal modes are wired up here.
DIST_MAP = {
    "lognormal": pm.Lognorm,
}
def run_parcel():
    """Run a single parcel model simulation configured by a YAML namelist.

    Reads the namelist path from the command line, constructs the aerosol
    modes and parcel model described therein, integrates the model, and
    writes the results to a netCDF file in the configured output directory.

    Exits with a non-zero status when any stage fails (reading the
    namelist, model setup, integration, or output) so callers and CI can
    detect the failure; previously every failure path exited with status 0.
    """
    # Read command-line arguments
    args = parser.parse_args()
    # Convert the namelist file into an in-memory dictionary
    try:
        print("Attempting to read simulation namelist {}".format(args.namelist))
        with open(args.namelist, "rb") as f:
            y = yaml.safe_load(f)
    except IOError:
        print("Couldn't read file {}".format(args.namelist))
        sys.exit(1)
    # Create the aerosol modes; each namelist entry maps a distribution name
    # (see DIST_MAP) onto an AerosolSpecies with the given hygroscopicity.
    aerosol_modes = []
    print("Constructing aerosol modes")
    for i, aerosol_params in enumerate(y["initial_aerosol"], start=1):
        ap = aerosol_params
        dist = DIST_MAP[ap["distribution"]](**ap["distribution_args"])
        aer = pm.AerosolSpecies(ap["name"], dist, kappa=ap["kappa"], bins=ap["bins"])
        print("  {:2d})".format(i), aer)
        aerosol_modes.append(aer)
    # Set up the model
    ic = y["initial_conditions"]
    print("Initializing model")
    try:
        model = pm.ParcelModel(
            aerosol_modes,
            V=ic["updraft_speed"],
            T0=ic["temperature"],
            # Convert relative humidity (fraction) into a (sub)saturation
            S0=-1.0 * (1.0 - ic["relative_humidity"]),
            P0=ic["pressure"],
            console=True,
            truncate_aerosols=True,
        )
    except pyrcel.util.ParcelModelError:
        print("Something went wrong setting up the model")
        sys.exit(1)
    # Run the model
    mc = y["model_control"]
    print("Beginning simulation")
    try:
        par_out, aer_out = model.run(
            max_steps=2000,
            solver="cvode",
            output_fmt="dataframes",
            # terminate=True,
            # terminate_depth=10.,
            **mc,
        )
    except pyrcel.util.ParcelModelError:
        print("Something went wrong during model run")
        sys.exit(1)
    # Report summary diagnostics (these were previously computed but unused)
    ec = y["experiment_control"]
    Smax = par_out["S"].max()
    T_fin = par_out["T"].iloc[-1]
    print("Peak supersaturation: {}".format(Smax))
    print("Final temperature: {}".format(T_fin))
    # Make output directory if it doesn't exist
    if not os.path.exists(ec["output_dir"]):
        os.makedirs(ec["output_dir"])
    out_file = os.path.join(ec["output_dir"], ec["name"]) + ".nc"
    try:
        print("Trying to save output to {}".format(out_file))
        pm.output.write_parcel_output(out_file, parcel=model)
    except (IOError, RuntimeError):
        print("Something went wrong saving to {}".format(out_file))
        sys.exit(1)
    # Successful completion
    print("Done!")
if __name__ == "__main__":
    run_parcel()
from itertools import product
import numpy as np
## Import unit testing librarys
try:
import unittest2 as unittest
except ImportError:
import unittest
def prod_to_array(*iterables):
    """Return the Cartesian product of the given iterables as a 2-D array.

    Each row of the result is one combination drawn from `iterables`, in
    the order produced by :func:`itertools.product`.
    """
    combinations = [combo for combo in product(*iterables)]
    return np.array(combinations)
""" Generate test case data for future reference.
"""
import os
import pickle
from itertools import product
import numpy as np
from pyrcel import thermo
REFERENCE_FN = "results.dict"
def generate_reference(overwrite=False):
    """Generate reference results from ``pyrcel.thermo`` for regression tests.

    Evaluates each thermodynamic function over a grid of input parameter
    combinations and pickles the results (keyed by function name) to
    ``REFERENCE_FN`` so that future code revisions can be checked against
    them.

    Parameters
    ----------
    overwrite : bool, optional
        If True, replace an existing reference file; otherwise the file is
        only written when it does not already exist.
    """
    results = dict()
    # Input parameter grids spanning the relevant ranges
    results["temperatures"] = np.linspace(233, 333, 10)  # K
    results["pressures"] = np.linspace(100, 1050, 10)  # hPa
    results["radii"] = np.logspace(-3, 1, 10)  # microns
    results["densities"] = np.linspace(0.5, 1.5, 10)
    results["dry_radii"] = np.logspace(-8, -6, 5)
    results["kappas"] = np.logspace(-2, 0, 5)
    results["r_over_r_dry"] = np.logspace(0.1, 3, 5)
    print("dv_cont", end=", ")
    results["dv_cont"] = [
        thermo.dv_cont(T, P * 100)
        for T, P in product(results["temperatures"], results["pressures"])
    ]
    print("(%d cases)" % len(results["dv_cont"]))
    print("dv", end=", ")
    results["dv"] = [
        thermo.dv(T, r * 1e-6, P * 100)
        for T, r, P in product(
            results["temperatures"], results["radii"], results["pressures"]
        )
    ]
    print("(%d cases)" % len(results["dv"]))
    print("rho_air", end=", ")
    results["rho_air"] = [
        thermo.rho_air(T, P * 100)
        for T, P in product(results["temperatures"], results["pressures"])
    ]
    print("(%d cases)" % len(results["rho_air"]))
    print("es", end=", ")
    results["es"] = [thermo.es(T - 273.15) for T in results["temperatures"]]
    print("(%d cases)" % len(results["es"]))
    print("ka_cont", end=", ")
    results["ka_cont"] = [thermo.ka_cont(T) for T in results["temperatures"]]
    print("(%d cases)" % len(results["ka_cont"]))
    print("ka", end=", ")
    results["ka"] = [
        thermo.ka(T, rho, r * 1e-6)
        for T, rho, r in product(
            results["temperatures"], results["densities"], results["radii"]
        )
    ]
    print("(%d cases)" % len(results["ka"]))
    print("sigma_w", end=", ")
    results["sigma_w"] = [thermo.sigma_w(T) for T in results["temperatures"]]
    print("(%d cases)" % len(results["sigma_w"]))
    print("Seq", end=", ")
    # BUGFIX: these keys were previously crossed -- "Seq_approx" stored
    # results from thermo.Seq (the exact solver) and "Seq_exact" stored
    # results from thermo.Seq_approx. Store each under its true name.
    results["Seq_approx"] = [
        thermo.Seq_approx(f * r_dry * 1e-6, r_dry * 1e-6, T, kappa)
        for f, r_dry, T, kappa in product(
            results["r_over_r_dry"],
            results["dry_radii"],
            results["temperatures"],
            results["kappas"],
        )
    ]
    results["Seq_exact"] = [
        thermo.Seq(f * r_dry * 1e-6, r_dry * 1e-6, T, kappa)
        for f, r_dry, T, kappa in product(
            results["r_over_r_dry"],
            results["dry_radii"],
            results["temperatures"],
            results["kappas"],
        )
    ]
    print("(%d cases)" % (2 * len(results["Seq_exact"]),))
    # Only write the reference file if it is missing, unless explicitly
    # asked to overwrite.
    if (not os.path.exists(REFERENCE_FN)) or overwrite:
        with open(REFERENCE_FN, "wb") as f:
            pickle.dump(results, f)
if __name__ == "__main__":
    generate_reference(True)
#!/usr/bin/env python
# NOTE(review): shebang previously read "#!/usr/env python", which is not a
# valid interpreter path; corrected to the conventional /usr/bin/env form.
"""Example driver: run a single parcel model simulation with one lognormal
aerosol mode, compute activation diagnostics, and plot the result."""
import time
import matplotlib.pyplot as plt
import pyrcel as pm
from pyrcel.postprocess import simulation_activation
# Initial thermodynamic conditions
P0 = 95000.0 # Pressure, Pa
T0 = 290.15 # Temperature, K
S0 = -0.01 # Supersaturation, 1-RH
V = 1.0  # updraft speed (assumed m/s -- TODO confirm against ParcelModel docs)
accom = 0.1  # condensation/accommodation coefficient passed to ParcelModel
# Single lognormal aerosol mode parameters
mu = 0.025  # geometric mean size (presumably microns; verify with Lognorm)
sigma = 1.82  # geometric standard deviation
kappa = 0.54  # hygroscopicity parameter
N = 1500.0  # total number concentration
aerosol_distribution = pm.Lognorm(mu=mu, sigma=sigma, N=N)
aerosol = pm.AerosolSpecies(
    "test", aerosol_distribution, kappa=kappa, bins=200
)
output_dt = 1.0 # simulation seconds; how frequently should output be saved?
solver_dt = (
    10.0
) # simulation seconds; how frequently should the solver be re-set?
# Why this change? (1) Adding ice is going to introduce a
# time-splitting operation, so the integration logic will need
# to change accordingly and be more sequential, step-by-step.
# (2) The solver works *much* better in terms of adapting to the
# stiffness of the ODE when it doesn't have to try to predict very
# far in advance where the solution will shoot.
#
# In general, use solver_dt = 10.*output_dt
dZ = 1000.0  # total depth to integrate over
t_end = dZ / V  # end time implied by depth and updraft speed
results = {}
initial_aerosols = [aerosol]
## Vanilla parcel model run
model = pm.ParcelModel(
    initial_aerosols, V, T0, S0, P0, console=True, accom=accom
)
# raw_input("Continue? ")
# Integrate the model, timing the run
start = time.time()
parcel, aerosols = model.run(
    t_end,
    output_dt,
    solver_dt, # note new argument order!
    terminate=True,
    terminate_depth=50.0,
    max_steps=2000,
    solver="cvode",
    output="dataframes",
)
end = time.time()
# Peak supersaturation and the (positional) index where it occurs
Smax = parcel.S.max()
tmax = parcel.S.argmax()
print("Elapsed time:", end - start)
print(" Smax", Smax)
print(" tmax", tmax)
print("")
print("Computing activation")
acts_total = simulation_activation(model, parcel, aerosols)
fig = plt.figure(2)
ax = fig.add_subplot(111)
def quick_plot(ax):
    """Plot the parcel supersaturation trace against height on ``ax``,
    with dashed guide lines marking the height and value of the peak
    supersaturation.

    Relies on the module-level ``parcel``, ``tmax``, ``Smax``, and ``S0``
    defined by the driver script above.
    """
    plt_meta = parcel.plot("z", "S", zorder=3, ax=ax)
    ylims = ax.get_ylim()
    c = plt_meta.lines[-1].get_color()
    # BUGFIX: `.ix` was removed from pandas (1.0+); use positional `.iloc`,
    # consistent with the hlines call below (tmax is a positional index
    # from Series.argmax()).
    plt.vlines(parcel.z.iloc[tmax], ylims[0], Smax, color=c, linestyle="dashed")
    plt.hlines(Smax, 0, parcel.z.iloc[tmax], color=c, linestyle="dashed")
    # A single argument sets only the lower y-limit
    plt.ylim(S0)
quick_plot(ax)
model.save("test.nc", other_dfs=[acts_total])

Sorry, the diff of this file is too big to display

""" Test cases for thermodynamics module.
Most of these test cases just compare the current version of the code's results
for parameter sets versus a reference to serve as basic regression testing.
"""
import pickle
import unittest
from itertools import product
from numpy.testing import assert_allclose
from .generate_data import REFERENCE_FN
from ..thermo import *
class TestThermoTestCases(unittest.TestCase):
    """Regression tests for ``pyrcel.thermo``.

    Each test evaluates a thermo function over the parameter grids stored
    in the pickled reference file (produced by ``generate_data.py``) and
    compares the fresh results against the stored ones.
    """

    def setUp(self):
        # Load the pickled reference data and unpack the parameter grids
        with open(REFERENCE_FN, "rb") as f:
            self.reference = pickle.load(f)
        self.temperatures = self.reference["temperatures"]
        self.pressures = self.reference["pressures"]
        self.radii = self.reference["radii"]
        self.densities = self.reference["densities"]

    def test_dv_cont(self):
        # dv_cont takes pressure in Pa; the grid is stored in hPa
        result = [
            dv_cont(T, P * 100)
            for T, P in product(self.temperatures, self.pressures)
        ]
        assert_allclose(self.reference["dv_cont"], result)

    def test_dv(self):
        # Radii are stored in microns; dv takes meters
        result = [
            dv(T, r * 1e-6, P * 100)
            for T, r, P in product(
                self.temperatures, self.radii, self.pressures
            )
        ]
        assert_allclose(self.reference["dv"], result)

    def test_rho_air(self):
        result = [
            rho_air(T, P * 100)
            for T, P in product(self.temperatures, self.pressures)
        ]
        assert_allclose(self.reference["rho_air"], result)

    def test_es(self):
        # es takes temperature in degrees Celsius
        result = [es(T - 273.15) for T in self.temperatures]
        assert_allclose(self.reference["es"], result)

    def test_ka_cont(self):
        result = [ka_cont(T) for T in self.temperatures]
        assert_allclose(self.reference["ka_cont"], result)

    def test_ka(self):
        result = [
            ka(T, rho, r * 1e-6)
            for T, rho, r in product(
                self.temperatures, self.densities, self.radii
            )
        ]
        assert_allclose(self.reference["ka"], result)

    def test_sigma_w(self):
        # BUGFIX: this method was previously named `sigma_w` (missing the
        # `test_` prefix), so unittest discovery silently skipped it.
        result = [sigma_w(T) for T in self.temperatures]
        assert_allclose(self.reference["sigma_w"], result)
"""
Software utilities
"""
class ParcelModelError(Exception):
    """Custom exception to throw during parcel model execution."""

    def __init__(self, error_str):
        # Chain to Exception.__init__ so that `args` is populated; without
        # this the exception pickles/reprs with empty args. Backward
        # compatible: `error_str` and str() behavior are unchanged.
        super(ParcelModelError, self).__init__(error_str)
        self.error_str = error_str

    def __str__(self):
        # Preserve historical behavior: the repr of the message, with quotes
        return repr(self.error_str)
pyrcel: cloud parcel model
==========================
![sample parcel model run](docs/figs/model_example.png)
[![DOI](https://zenodo.org/badge/12927551.svg)](https://zenodo.org/badge/latestdoi/12927551)[![PyPI Version](https://badge.fury.io/py/pyrcel.svg)](https://badge.fury.io/py/pyrcel)[![CircleCI Build Status](https://circleci.com/gh/darothen/pyrcel/tree/master.svg?style=svg)](https://circleci.com/gh/darothen/pyrcel/tree/master)[![Documentation Status](https://readthedocs.org/projects/pyrcel/badge/?version=stable)](http://pyrcel.readthedocs.org/en/stable/?badge=stable)
This is an implementation of a simple, adiabatic cloud parcel model for use in
aerosol-cloud interaction studies. [Rothenberg and Wang (2016)](http://journals.ametsoc.org/doi/full/10.1175/JAS-D-15-0223.1) discuss the model in detail and its improvements
and changes over [Nenes et al (2001)][nenes2001]:
* Implementation of κ-Köhler theory for condensation physics ([Petters and
Kreidenweis, 2007)][pk2007]
* Extension of model to handle arbitrary sectional representations of aerosol
populations, based on user-controlled empirical or parameterized size distributions
* Improved, modular numerical framework for integrating the model, including bindings
to several different stiff integrators:
- `lsoda` - [scipy ODEINT wrapper](http://docs.scipy.org/doc/scipy/reference/generated/scipy.integrate.odeint.html)
- `vode, lsode*, lsoda*` - ODEPACK via [odespy][hplgit]
- `cvode` - SUNDIALS via [Assimulo](http://www.jmodelica.org/assimulo_home/index.html#)
among other details. It also includes a library of droplet activation routines and scripts/notebooks for evaluating those schemes against equivalent calculations done with the parcel model.
Updated code can be found in the project [github repository](https://github.com/darothen/pyrcel). If you'd like to use this code or have any questions about it, please [contact the author][author_email]. In particular, if you use this code for research purposes, be sure to carefully read through the model and ensure that you have tweaked/configured it for your purposes (e.g., modifying the accommodation coefficient or other derived quantities).
[Detailed documentation is available](http://pyrcel.readthedocs.org/en/latest/index.html), including a [scientific description](http://pyrcel.readthedocs.org/en/latest/sci_descr.html), [installation details](http://pyrcel.readthedocs.org/en/latest/install.html), and a [basic example](http://pyrcel.readthedocs.org/en/latest/examples/basic_run.html) which produces a figure like the plot at the top of this page.
Requirements
------------
**Required**
* Python >= 3.7
* [numba](http://numba.pydata.org)
* [NumPy](http://www.numpy.org)
* [SciPy](http://www.scipy.org)
* [pandas](http://pandas.pydata.org) - v0.25+
* [xarray](http://xarray.pydata.org/en/stable/) - v2023+
* [PyYAML](http://pyyaml.org/)
**Optional**
The following packages are used for better numerics (ODE solving)
* [Assimulo](http://www.jmodelica.org/assimulo)
The easiest way to satisfy the basic requirements for building and running the
model is to use the [Anaconda](http://continuum.io/downloads) scientific Python
distribution. Alternatively, a
[miniconda environment](http://conda.pydata.org/docs/using/envs.html) is
provided to quickly set-up and get running the model. Assimulo's dependency on
the SUNDIALS library makes it a little bit tougher to install in an automated
fashion, so it has not been included in the automatic setup provided here; you
should refer to [Assimulo's documentation](http://www.jmodelica.org/assimulo_home/installation.html)
for more information on its installation process. Note that many components of
the model and package can be used without Assimulo.
Development
-----------
[http://github.com/darothen/pyrcel]()
Please fork this repository if you intend to develop the model further so that the
code's provenance can be maintained.
License
-------
[All scientific code should be licensed](http://www.astrobetter.com/the-whys-and-hows-of-licensing-scientific-code/). This code is released under the New BSD (3-clause) [license](LICENSE.md).
[author_email]: mailto:daniel@danielrothenberg.com
[nenes2001]: http://nenes.eas.gatech.edu/Preprints/KinLimitations_TellusPP.pdf
[pk2007]: http://www.atmos-chem-phys.net/7/1961/2007/acp-7-1961-2007.html
[hplgit]: https://github.com/hplgit/odespy
numba
numpy
pandas
pyyaml
scipy
setuptools
xarray
# Master TODO list
1. Currently uses virtual temperature ($T_v = T(1 + 0.61w)$) when approximating density; should rather use the density temperature for consistency.
- updates in thermo.py, parcel_aux.pyx, and fortran code
2. Flesh out basic plotting routines for quick visualization.
3. Add a simple IO package
- save state for an initial simulation, and its end point
- read in that state to initialize a new model
- better flexibility between pandas / xray as basic storage options
4. Re-structure integration logic
- model should accept two timesteps - one of the numerical integration, one for the output
+ Actually not a problem for the variable-step size solvers; e.g. for CVODE, dt will be the output points
- the integration should proceed piecewise between output timesteps
+ Already does for Assimulo; 1-minute chunks reporting at the desired timestep
5. Add activation diagnostics to integration loop
+1
-1

@@ -1,2 +0,2 @@

Copyright (c) Daniel Rothenberg, 2012-2016
Copyright (c) Daniel Rothenberg, 2012-2019
All rights reserved.

@@ -3,0 +3,0 @@

+124
-20

@@ -1,28 +0,132 @@

Metadata-Version: 1.1
Metadata-Version: 2.1
Name: pyrcel
Version: 1.3.1
Version: 1.3.2
Summary: pyrcel: 0D adiabatic cloud parcel model
Home-page: https://github.com/darothen/pyrcel
Author: Daniel Rothenberg
Author-email: darothen@mit.edu
License: New BSD (3-clause)
Download-URL: https://github.com/darothen/pyrcel
Description:
This code implements a relatively simple, adiabatic cloud parcel model for studying aerosol
activation. It allows the user to peform and iterate parcel model experiments in a simple
fashion through the use of an object-oriented implementation. Furthermore, interfaces for
several numerical solvers are built into the model so that users can choose whatever scheme
they would like to run with the model.
Platform: UNKNOWN
Author-email: Daniel Rothenberg <daniel@danielrothenberg.com>
License: Copyright (c) Daniel Rothenberg, 2012-2019
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Project-URL: Documentation, https://pyrcel.readthedocs.io/en/latest/
Project-URL: Repository, https://github.com/darothen/pyrcel
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Intended Audience :: Science/Research
Classifier: License :: OSI Approved :: MIT License
Classifier: License :: OSI Approved :: BSD License
Classifier: Natural Language :: English
Classifier: Operating System :: Unix
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Fortran
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Topic :: Scientific/Engineering :: Atmospheric Science
Requires-Python: >=3.8
Description-Content-Type: text/markdown
License-File: LICENSE.md
Requires-Dist: numba
Requires-Dist: numpy
Requires-Dist: pandas
Requires-Dist: pyyaml
Requires-Dist: scipy
Requires-Dist: setuptools
Requires-Dist: setuptools-scm
Requires-Dist: xarray
pyrcel: cloud parcel model
==========================
![sample parcel model run](docs/figs/model_example.png)
[![DOI](https://zenodo.org/badge/12927551.svg)](https://zenodo.org/badge/latestdoi/12927551)[![PyPI Version](https://badge.fury.io/py/pyrcel.svg)](https://badge.fury.io/py/pyrcel)[![CircleCI Build Status](https://circleci.com/gh/darothen/pyrcel/tree/master.svg?style=svg)](https://circleci.com/gh/darothen/pyrcel/tree/master)[![Documentation Status](https://readthedocs.org/projects/pyrcel/badge/?version=stable)](http://pyrcel.readthedocs.org/en/stable/?badge=stable)
This is an implementation of a simple, adiabatic cloud parcel model for use in
aerosol-cloud interaction studies. [Rothenberg and Wang (2016)](http://journals.ametsoc.org/doi/full/10.1175/JAS-D-15-0223.1) discuss the model in detail and its improvements
and changes over [Nenes et al (2001)][nenes2001]:
* Implementation of κ-Köhler theory for condensation physics ([Petters and
Kreidenweis, 2007)][pk2007]
* Extension of model to handle arbitrary sectional representations of aerosol
populations, based on user-controlled empirical or parameterized size distributions
* Improved, modular numerical framework for integrating the model, including bindings
to several different stiff integrators:
- `lsoda` - [scipy ODEINT wrapper](http://docs.scipy.org/doc/scipy/reference/generated/scipy.integrate.odeint.html)
- `vode, lsode*, lsoda*` - ODEPACK via [odespy][hplgit]
- `cvode` - SUNDIALS via [Assimulo](http://www.jmodelica.org/assimulo_home/index.html#)
among other details. It also includes a library of droplet activation routines and scripts/notebooks for evaluating those schemes against equivalent calculations done with the parcel model.
Updated code can be found the project [github repository](https://github.com/darothen/pyrcel). If you'd like to use this code or have any questions about it, please [contact the author][author_email]. In particular, if you use this code for research purposes, be sure to carefully read through the model and ensure that you have tweaked/configured it for your purposes (i.e., modifying the accomodation coefficient); other derived quantities).
[Detailed documentation is available](http://pyrcel.readthedocs.org/en/latest/index.html), including a [scientific description](http://pyrcel.readthedocs.org/en/latest/sci_descr.html), [installation details](http://pyrcel.readthedocs.org/en/latest/install.html), and a [basic example](http://pyrcel.readthedocs.org/en/latest/examples/basic_run.html) which produces a figure like the plot at the top of this page.
Requirements
------------
**Required**
* Python >= 3.7
* [numba](http://numba.pydata.org)
* [NumPy](http://www.numpy.org)
* [SciPy](http://www.scipy.org)
* [pandas](http://pandas.pydata.org) - v0.25+
* [xarray](http://xarray.pydata.org/en/stable/) - v2023+
* [PyYAML](http://pyyaml.org/)
**Optional**
The following packages are used for better numerics (ODE solving)
* [Assimulo](http://www.jmodelica.org/assimulo)
The easiest way to satisfy the basic requirements for building and running the
model is to use the [Anaconda](http://continuum.io/downloads) scientific Python
distribution. Alternatively, a
[miniconda environment](http://conda.pydata.org/docs/using/envs.html) is
provided to quickly set-up and get running the model. Assimulo's dependency on
the SUNDIALS library makes it a little bit tougher to install in an automated
fashion, so it has not been included in the automatic setup provided here; you
should refer to [Assimulo's documentation](http://www.jmodelica.org/assimulo_home/installation.html)
for more information on its installation process. Note that many components of
the model and package can be used without Assimulo.
Development
-----------
[http://github.com/darothen/pyrcel]()
Please fork this repository if you intend to develop the model further so that the
code's provenance can be maintained.
License
-------
[All scientific code should be licensed](http://www.astrobetter.com/the-whys-and-hows-of-licensing-scientific-code/). This code is released under the New BSD (3-clause) [license](LICENSE.md).
[author_email]: mailto:daniel@danielrothenberg.com
[nenes2001]: http://nenes.eas.gatech.edu/Preprints/KinLimitations_TellusPP.pdf
[pk2007]: http://www.atmos-chem-phys.net/7/1961/2007/acp-7-1961-2007.html
[hplgit]: https://github.com/hplgit/odespy

@@ -1,28 +0,132 @@

Metadata-Version: 1.1
Metadata-Version: 2.1
Name: pyrcel
Version: 1.3.1
Version: 1.3.2
Summary: pyrcel: 0D adiabatic cloud parcel model
Home-page: https://github.com/darothen/pyrcel
Author: Daniel Rothenberg
Author-email: darothen@mit.edu
License: New BSD (3-clause)
Download-URL: https://github.com/darothen/pyrcel
Description:
This code implements a relatively simple, adiabatic cloud parcel model for studying aerosol
activation. It allows the user to peform and iterate parcel model experiments in a simple
fashion through the use of an object-oriented implementation. Furthermore, interfaces for
several numerical solvers are built into the model so that users can choose whatever scheme
they would like to run with the model.
Platform: UNKNOWN
Author-email: Daniel Rothenberg <daniel@danielrothenberg.com>
License: Copyright (c) Daniel Rothenberg, 2012-2019
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Project-URL: Documentation, https://pyrcel.readthedocs.io/en/latest/
Project-URL: Repository, https://github.com/darothen/pyrcel
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Intended Audience :: Science/Research
Classifier: License :: OSI Approved :: MIT License
Classifier: License :: OSI Approved :: BSD License
Classifier: Natural Language :: English
Classifier: Operating System :: Unix
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Fortran
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Topic :: Scientific/Engineering :: Atmospheric Science
Requires-Python: >=3.8
Description-Content-Type: text/markdown
License-File: LICENSE.md
Requires-Dist: numba
Requires-Dist: numpy
Requires-Dist: pandas
Requires-Dist: pyyaml
Requires-Dist: scipy
Requires-Dist: setuptools
Requires-Dist: setuptools-scm
Requires-Dist: xarray
pyrcel: cloud parcel model
==========================
![sample parcel model run](docs/figs/model_example.png)
[![DOI](https://zenodo.org/badge/12927551.svg)](https://zenodo.org/badge/latestdoi/12927551)[![PyPI Version](https://badge.fury.io/py/pyrcel.svg)](https://badge.fury.io/py/pyrcel)[![CircleCI Build Status](https://circleci.com/gh/darothen/pyrcel/tree/master.svg?style=svg)](https://circleci.com/gh/darothen/pyrcel/tree/master)[![Documentation Status](https://readthedocs.org/projects/pyrcel/badge/?version=stable)](http://pyrcel.readthedocs.org/en/stable/?badge=stable)
This is an implementation of a simple, adiabatic cloud parcel model for use in
aerosol-cloud interaction studies. [Rothenberg and Wang (2016)](http://journals.ametsoc.org/doi/full/10.1175/JAS-D-15-0223.1) discuss the model in detail and its improvements
and changes over [Nenes et al (2001)][nenes2001]:
* Implementation of κ-Köhler theory for condensation physics ([Petters and
Kreidenweis, 2007)][pk2007]
* Extension of model to handle arbitrary sectional representations of aerosol
populations, based on user-controlled empirical or parameterized size distributions
* Improved, modular numerical framework for integrating the model, including bindings
to several different stiff integrators:
- `lsoda` - [scipy ODEINT wrapper](http://docs.scipy.org/doc/scipy/reference/generated/scipy.integrate.odeint.html)
- `vode, lsode*, lsoda*` - ODEPACK via [odespy][hplgit]
- `cvode` - SUNDIALS via [Assimulo](http://www.jmodelica.org/assimulo_home/index.html#)
among other details. It also includes a library of droplet activation routines and scripts/notebooks for evaluating those schemes against equivalent calculations done with the parcel model.
Updated code can be found the project [github repository](https://github.com/darothen/pyrcel). If you'd like to use this code or have any questions about it, please [contact the author][author_email]. In particular, if you use this code for research purposes, be sure to carefully read through the model and ensure that you have tweaked/configured it for your purposes (i.e., modifying the accomodation coefficient); other derived quantities).
[Detailed documentation is available](http://pyrcel.readthedocs.org/en/latest/index.html), including a [scientific description](http://pyrcel.readthedocs.org/en/latest/sci_descr.html), [installation details](http://pyrcel.readthedocs.org/en/latest/install.html), and a [basic example](http://pyrcel.readthedocs.org/en/latest/examples/basic_run.html) which produces a figure like the plot at the top of this page.
Requirements
------------
**Required**
* Python >= 3.7
* [numba](http://numba.pydata.org)
* [NumPy](http://www.numpy.org)
* [SciPy](http://www.scipy.org)
* [pandas](http://pandas.pydata.org) - v0.25+
* [xarray](http://xarray.pydata.org/en/stable/) - v2023+
* [PyYAML](http://pyyaml.org/)
**Optional**
The following packages are used for better numerics (ODE solving)
* [Assimulo](http://www.jmodelica.org/assimulo)
The easiest way to satisfy the basic requirements for building and running the
model is to use the [Anaconda](http://continuum.io/downloads) scientific Python
distribution. Alternatively, a
[miniconda environment](http://conda.pydata.org/docs/using/envs.html) is
provided to quickly set-up and get running the model. Assimulo's dependency on
the SUNDIALS library makes it a little bit tougher to install in an automated
fashion, so it has not been included in the automatic setup provided here; you
should refer to [Assimulo's documentation](http://www.jmodelica.org/assimulo_home/installation.html)
for more information on its installation process. Note that many components of
the model and package can be used without Assimulo.
Development
-----------
[http://github.com/darothen/pyrcel]()
Please fork this repository if you intend to develop the model further so that the
code's provenance can be maintained.
License
-------
[All scientific code should be licensed](http://www.astrobetter.com/the-whys-and-hows-of-licensing-scientific-code/). This code is released under the New BSD (3-clause) [license](LICENSE.md).
[author_email]: mailto:daniel@danielrothenberg.com
[nenes2001]: http://nenes.eas.gatech.edu/Preprints/KinLimitations_TellusPP.pdf
[pk2007]: http://www.atmos-chem-phys.net/7/1961/2007/acp-7-1961-2007.html
[hplgit]: https://github.com/hplgit/odespy

@@ -0,4 +1,8 @@

numba
numpy
pandas
pyyaml
scipy
pandas
future
setuptools
setuptools-scm
xarray

@@ -0,51 +1,41 @@

.gitignore
.readthedocs.yaml
LICENSE.md
MANIFEST.in
setup.py
doc/Makefile
doc/build_doc.sh
doc/conf.py
doc/environment.yml
doc/index.rst
doc/install.rst
doc/parcel.rst
doc/reference.rst
doc/sci_descr.rst
doc/examples/Makefile
doc/examples/README.md
doc/examples/activate.ipynb
doc/examples/activate.rst
doc/examples/basic_run.ipynb
doc/examples/basic_run.rst
doc/examples/activate_files/activate_13_0.png
doc/examples/basic_run_files/basic_run_13_0.png
doc/examples/basic_run_files/basic_run_9_1.png
doc/figs/model_example.png
doc/figs/sample_script.png
doc/generated/pyrcel.ParcelModel.rst
doc/generated/pyrcel.activation.arg2000.rst
doc/generated/pyrcel.activation.binned_activation.rst
doc/generated/pyrcel.activation.lognormal_activation.rst
doc/generated/pyrcel.activation.mbn2014.rst
doc/generated/pyrcel.activation.ming2006.rst
doc/generated/pyrcel.activation.multi_mode_activation.rst
doc/generated/pyrcel.activation.shipwayabel2010.rst
doc/generated/pyrcel.aerosol.AerosolSpecies.rst
doc/generated/pyrcel.aerosol.dist_to_conc.rst
doc/generated/pyrcel.distributions.BaseDistribution.rst
doc/generated/pyrcel.distributions.Gamma.rst
doc/generated/pyrcel.distributions.Lognorm.rst
doc/generated/pyrcel.distributions.MultiModeLognorm.rst
doc/generated/pyrcel.driver.iterate_runs.rst
doc/generated/pyrcel.driver.run_model.rst
doc/generated/pyrcel.thermo.Seq.rst
doc/generated/pyrcel.thermo.Seq_approx.rst
doc/generated/pyrcel.thermo.critical_curve.rst
doc/generated/pyrcel.thermo.dv.rst
doc/generated/pyrcel.thermo.es.rst
doc/generated/pyrcel.thermo.ka.rst
doc/generated/pyrcel.thermo.kohler_crit.rst
doc/generated/pyrcel.thermo.rho_air.rst
doc/generated/pyrcel.thermo.sigma_w.rst
doc/templates/class.rst
README.md
TODO.md
pyproject.toml
requirements.txt
.circleci/config.yml
ci/requirements-py310.yml
ci/requirements-py311.yml
ci/requirements-py38.yml
ci/requirements-py39.yml
docs/Makefile
docs/build_doc.sh
docs/conf.py
docs/environment.yml
docs/index.rst
docs/install.rst
docs/parcel.rst
docs/reference.rst
docs/requirements.txt
docs/sci_descr.rst
docs/examples/Makefile
docs/examples/README.md
docs/examples/activate.ipynb
docs/examples/activate.rst
docs/examples/basic_run.ipynb
docs/examples/basic_run.rst
docs/examples/activate_files/activate_13_0.png
docs/examples/basic_run_files/basic_run_13_0.png
docs/examples/basic_run_files/basic_run_9_1.png
docs/figs/model_example.png
docs/figs/sample_script.png
docs/templates/class.rst
examples/simple.yml
examples/simple_mono.yml
examples/simple_supersat.yml
pyrcel/__init__.py
pyrcel/_parcel_aux_numba.py
pyrcel/activation.py

@@ -59,5 +49,5 @@ pyrcel/aerosol.py

pyrcel/parcel.py
pyrcel/parcel_aux.pyx
pyrcel/postprocess.py
pyrcel/thermo.py
pyrcel/util.py
pyrcel/version.py

@@ -68,5 +58,12 @@ pyrcel/vis.py

pyrcel.egg-info/dependency_links.txt
pyrcel.egg-info/entry_points.txt
pyrcel.egg-info/requires.txt
pyrcel.egg-info/top_level.txt
pyrcel/data/std_atm.csv
scripts/run_parcel
pyrcel/scripts/__init__.py
pyrcel/scripts/run_parcel.py
pyrcel/test/__init__.py
pyrcel/test/generate_data.py
pyrcel/test/pm_test.py
pyrcel/test/results.dict
pyrcel/test/test_thermo.py

@@ -11,12 +11,18 @@ """

from __future__ import absolute_import
from importlib.metadata import version as _version
from . version import __version__
__author__ = "Daniel Rothenberg <darothen@mit.edu>"
try:
__version__ = _version("pyrcel")
except Exception:
# This is a local copy, or a copy that was not installed via setuptools
__version__ = "local"
from . activation import *
from . aerosol import *
from . distributions import *
from . driver import *
from . parcel import *
from . thermo import *
__author__ = "Daniel Rothenberg <daniel@danielrothenberg.com>"
# TODO: Re-factor module-wide implicit imports
from .activation import *
from .aerosol import *
from .distributions import *
from .driver import *
from .parcel import *
from .thermo import *
""" Collection of activation parameterizations.
"""
from __future__ import absolute_import
from builtins import zip
from builtins import range
import numpy as np
from scipy.special import erfc
from . thermo import (es, rho_air, ka, ka_cont, dv, dv_cont,
sigma_w, kohler_crit, Seq)
from . import constants as c
from .thermo import dv, dv_cont, es, ka_cont, kohler_crit, sigma_w
def _unpack_aerosols(aerosols):
""" Convert a list of :class:`AerosolSpecies` into lists of aerosol properties.
"""Convert a list of :class:`AerosolSpecies` into lists of aerosol properties.

@@ -47,3 +42,3 @@ Parameters

def lognormal_activation(smax, mu, sigma, N, kappa, sgi=None, T=None, approx=True):
""" Compute the activated number/fraction from a lognormal mode
"""Compute the activated number/fraction from a lognormal mode

@@ -78,5 +73,5 @@ Parameters

ui = 2.*np.log(sgi/smax)/(3.*np.sqrt(2.)*np.log(sigma))
N_act = 0.5*N*erfc(ui)
act_frac = N_act/N
ui = 2.0 * np.log(sgi / smax) / (3.0 * np.sqrt(2.0) * np.log(sigma))
N_act = 0.5 * N * erfc(ui)
act_frac = N_act / N

@@ -87,3 +82,3 @@ return N_act, act_frac

def binned_activation(Smax, T, rs, aerosol, approx=False):
""" Compute the activation statistics of a given aerosol, its transient
"""Compute the activation statistics of a given aerosol, its transient
size distribution, and updraft characteristics. Following Nenes et al, 2001

@@ -122,6 +117,8 @@ also compute the kinetic limitation statistics for the aerosol.

# a Series
if hasattr(rs, 'values'):
if hasattr(rs, "values"):
rs = rs.values
r_crits, s_crits = list(zip(*[kohler_crit(T, r_dry, kappa, approx) for r_dry in r_drys]))
r_crits, s_crits = list(
zip(*[kohler_crit(T, r_dry, kappa, approx) for r_dry in r_drys])
)
s_crits = np.array(s_crits)

@@ -132,5 +129,5 @@ r_crits = np.array(r_crits)

# less than the environmental supersaturation
activated_eq = (Smax >= s_crits)
activated_eq = Smax >= s_crits
N_eq = np.sum(Nis[activated_eq])
eq_frac = N_eq/N_tot
eq_frac = N_eq / N_tot

@@ -143,3 +140,3 @@ # Kinetic calculation - find the aerosol with the smallest dry radius which has

if not np.any(is_r_large):
N_kn, kn_frac = 0., 0.
N_kn, kn_frac = 0.0, 0.0
phi = 1.0

@@ -149,3 +146,3 @@ else:

N_kn = np.sum(Nis[smallest_ind:])
kn_frac = N_kn/N_tot
kn_frac = N_kn / N_tot

@@ -157,9 +154,9 @@ # Unactivated - all droplets smaller than their critical size

rs_drops = rs[droplets]
too_small = (rs_drops < r_crits_drops)
too_small = rs_drops < r_crits_drops
N_unact = np.sum(Nis_drops[too_small])
phi = N_unact/N_kn
phi = N_unact / N_kn
alpha = N_kn/N_eq
alpha = N_kn / N_eq

@@ -169,4 +166,5 @@ return eq_frac, kn_frac, alpha, phi

# noinspection PyUnresolvedReferences
def multi_mode_activation(Smax, T, aerosols, rss):
""" Compute the activation statistics of a multi-mode, binned_activation
"""Compute the activation statistics of a multi-mode, binned_activation
aerosol population.

@@ -197,2 +195,3 @@

######################################################################

@@ -204,4 +203,5 @@ # Code implementing the Nenes and Seinfeld (2003) parameterization,

def _vpres(T):
""" Polynomial approximation of saturated water vapour pressure as
"""Polynomial approximation of saturated water vapour pressure as
a function of temperature.

@@ -225,9 +225,15 @@

# Coefficients for vapor pressure approximation
A = [6.107799610e+0, 4.436518521e-1, 1.428945805e-2,
2.650648471e-4, 3.031240396e-6, 2.034080948e-8,
6.136820929e-11]
A = [
6.107799610e0,
4.436518521e-1,
1.428945805e-2,
2.650648471e-4,
3.031240396e-6,
2.034080948e-8,
6.136820929e-11,
]
T -= 273 # Convert from Kelvin to C
vp = A[-1]*T
vp = A[-1] * T
for ai in reversed(A[1:-1]):
vp = (vp + ai)*T
vp = (vp + ai) * T
vp += A[0]

@@ -238,12 +244,12 @@ return vp

def _erfp(x):
""" Polynomial approximation to error function """
"""Polynomial approximation to error function"""
AA = [0.278393, 0.230389, 0.000972, 0.078108]
y = np.abs(1.0 * x)
axx = 1.0 + y*(AA[0] + y*(AA[1] + y*(AA[2] + y*AA[3])))
axx = axx*axx
axx = axx*axx
axx = 1.0 - (1.0/axx)
axx = 1.0 + y * (AA[0] + y * (AA[1] + y * (AA[2] + y * AA[3])))
axx = axx * axx
axx = axx * axx
axx = 1.0 - (1.0 / axx)
if x <= 0.:
axx = -1.0*axx
if x <= 0.0:
axx = -1.0 * axx

@@ -253,6 +259,18 @@ return axx

def mbn2014(V, T, P, aerosols=[], accom=c.ac,
mus=[], sigmas=[], Ns=[], kappas=[],
xmin=1e-5, xmax=0.1, tol=1e-6, max_iters=100):
""" Computes droplet activation using an iterative scheme.
def mbn2014(
V,
T,
P,
aerosols=[],
accom=c.ac,
mus=[],
sigmas=[],
Ns=[],
kappas=[],
xmin=1e-5,
xmax=0.1,
tol=1e-6,
max_iters=100,
):
"""Computes droplet activation using an iterative scheme.

@@ -301,6 +319,6 @@ This method implements the iterative activation scheme under development by

d = _unpack_aerosols(aerosols)
mus = d['mus']
sigmas = d['sigmas']
kappas = d['kappas']
Ns = d['Ns']
mus = d["mus"]
sigmas = d["sigmas"]
kappas = d["kappas"]
Ns = d["Ns"]
else:

@@ -317,4 +335,4 @@ # Assert that the aerosol was already decomposed into component vars

dpgs = 2*(mus*1e-6)
Ns = Ns*1e6
dpgs = 2 * (mus * 1e-6)
Ns = Ns * 1e6

@@ -335,13 +353,13 @@ nmodes = len(Ns)

# rho_a = rho_air(T, P, RH=0.0) # MBN2014: could set RH=1.0, account for moisture
rho_a = P*Ma/R/T
aka = ka_cont(T) # MBN2014: could use thermo.ka(), include air density
rho_a = P * Ma / R / T
aka = ka_cont(T) # MBN2014: could use thermo.ka(), include air density
# surt = sigma_w(T)
surt = (0.0761 - 1.55e-4*(T-273.))
surt = 0.0761 - 1.55e-4 * (T - 273.0)
# Compute modal critical supersaturation (Sg) for each mode, corresponding
# to the critical supersaturation at the median diameter of each mode
A = 4.*Mw*surt/R/T/Rho_w
A = 4.0 * Mw * surt / R / T / Rho_w
# There are three different ways to do this:
# 1) original formula from MBN2014
f = lambda T, dpg, kappa: np.sqrt((A**3.)*4./27./kappa/(dpg**3.))
f = lambda T, dpg, kappa: np.sqrt((A**3.0) * 4.0 / 27.0 / kappa / (dpg**3.0))
# 2) detailed kohler calculation

@@ -358,25 +376,35 @@ # f = lambda T, dpg, kappa: kohler_crit(T, dpg/2., kappa)

# dv_orig = dv_cont(T, P)
dv_orig = 1e-4*(0.211/(P/1.013e5)*(T/273.)**1.94)
dv_orig = 1e-4 * (0.211 / (P / 1.013e5) * (T / 273.0) ** 1.94)
dv_big = 5.0e-6
dv_low = 1e-6*0.207683*(accom**(-0.33048))
dv_low = 1e-6 * 0.207683 * (accom ** (-0.33048))
coef = (2.*np.pi*Mw/(R*T))**0.5
coef = (2.0 * np.pi * Mw / (R * T)) ** 0.5
# TODO: Formatting on this average dv equation
dv_ave = (dv_orig/(dv_big-dv_low))*((dv_big-dv_low)-(2*dv_orig/accom)*coef*
(np.log((dv_big+(2*dv_orig/accom)*coef)/(dv_low+(2*dv_orig/accom)*
coef))))
dv_ave = (dv_orig / (dv_big - dv_low)) * (
(dv_big - dv_low)
- (2 * dv_orig / accom)
* coef
* (
np.log(
(dv_big + (2 * dv_orig / accom) * coef)
/ (dv_low + (2 * dv_orig / accom) * coef)
)
)
)
# Setup constants used in supersaturation equation
wv_pres_sat = _vpres(T)*(1e5/1e3) # MBN2014: could also use thermo.es()
alpha = g*Mw*L/Cp/R/T/T - g*Ma/R/T
beta1 = P*Ma/wv_pres_sat/Mw + Mw*L*L/Cp/R/T/T
beta2 = R*T*Rho_w/wv_pres_sat/dv_ave/Mw/4.0 + \
L*Rho_w/4.0/aka/T*(L*Mw/R/T - 1.0) # this is 1/G
beta = 0.5*np.pi*beta1*Rho_w/beta2/alpha/V/rho_a
wv_pres_sat = _vpres(T) * (1e5 / 1e3) # MBN2014: could also use thermo.es()
alpha = g * Mw * L / Cp / R / T / T - g * Ma / R / T
beta1 = P * Ma / wv_pres_sat / Mw + Mw * L * L / Cp / R / T / T
beta2 = (
R * T * Rho_w / wv_pres_sat / dv_ave / Mw / 4.0
+ L * Rho_w / 4.0 / aka / T * (L * Mw / R / T - 1.0)
) # this is 1/G
beta = 0.5 * np.pi * beta1 * Rho_w / beta2 / alpha / V / rho_a
cf1 = 0.5*np.sqrt((1/beta2)/(alpha*V))
cf2 = A/3.0
cf1 = 0.5 * np.sqrt((1 / beta2) / (alpha * V))
cf2 = A / 3.0
def _sintegral(smax):
""" Integrate the activation equation, using ``spar`` as the population
"""Integrate the activation equation, using ``spar`` as the population
splitting threshold.

@@ -388,19 +416,21 @@

zeta_c = ((16./9.)*alpha*V*beta2*(A**2))**0.25
delta = 1.0 - (zeta_c/smax)**4. # spar -> smax
critical = delta <= 0.
zeta_c = ((16.0 / 9.0) * alpha * V * beta2 * (A**2)) ** 0.25
delta = 1.0 - (zeta_c / smax) ** 4.0 # spar -> smax
critical = delta <= 0.0
if critical:
ratio = (2e7/3.0)*A*(smax**(-0.3824) - zeta_c**(-0.3824)) # Computing sp1 and sp2 (sp1 = sp2)
ratio = 1./np.sqrt(2.) + ratio
ratio = (
(2e7 / 3.0) * A * (smax ** (-0.3824) - zeta_c ** (-0.3824))
) # Computing sp1 and sp2 (sp1 = sp2)
ratio = 1.0 / np.sqrt(2.0) + ratio
if ratio > 1.:
ratio = 1. # cap maximum value
ssplt2 = smax*ratio
if ratio > 1.0:
ratio = 1.0 # cap maximum value
ssplt2 = smax * ratio
else:
ssplt1 = 0.5*(1. - np.sqrt(delta)) # min root --> sp1
ssplt2 = 0.5*(1. + np.sqrt(delta)) # max root --> sp2
ssplt1 = np.sqrt(ssplt1)*smax
ssplt2 = np.sqrt(ssplt2)*smax
ssplt1 = 0.5 * (1.0 - np.sqrt(delta)) # min root --> sp1
ssplt2 = 0.5 * (1.0 + np.sqrt(delta)) # max root --> sp2
ssplt1 = np.sqrt(ssplt1) * smax
ssplt2 = np.sqrt(ssplt2) * smax

@@ -416,14 +446,16 @@ ssplt = ssplt2 # secondary partitioning supersaturation

sqrtwo = np.sqrt(2.)
sqrtwo = np.sqrt(2.0)
for i in range(nmodes):
log_sigma = np.log(sigmas[i]) # ln(sigma_i)
log_sgi_smax = np.log(sgis[i]/smax) # ln(sg_i/smax)
log_sgi_sp2 = np.log(sgis[i]/ssplt2) # ln(sg_i/sp2
log_sigma = np.log(sigmas[i]) # ln(sigma_i)
log_sgi_smax = np.log(sgis[i] / smax) # ln(sg_i/smax)
log_sgi_sp2 = np.log(sgis[i] / ssplt2) # ln(sg_i/sp2
u_sp2 = 2.*log_sgi_sp2/(3.*sqrtwo*log_sigma)
u_smax = 2.*log_sgi_smax/(3.*sqrtwo*log_sigma)
u_sp2 = 2.0 * log_sgi_sp2 / (3.0 * sqrtwo * log_sigma)
u_smax = 2.0 * log_sgi_smax / (3.0 * sqrtwo * log_sigma)
# Subtract off the integrating factor
log_factor = 3.*log_sigma/(2.*sqrtwo)
log_factor = 3.0 * log_sigma / (2.0 * sqrtwo)
d_eq = A*2./sgis[i]/3./np.sqrt(3.) # Dpc/sqrt(3) - equilibrium diameter
d_eq = (
A * 2.0 / sgis[i] / 3.0 / np.sqrt(3.0)
) # Dpc/sqrt(3) - equilibrium diameter

@@ -433,31 +465,45 @@ erf_u_sp2 = _erfp(u_sp2 - log_factor) # ERF2

integ2[i] = (np.exp(9./8.*log_sigma*log_sigma)*Ns[i]/sgis[i])*(erf_u_sp2 - erf_u_smax)
integ2[i] = (
np.exp(9.0 / 8.0 * log_sigma * log_sigma) * Ns[i] / sgis[i]
) * (erf_u_sp2 - erf_u_smax)
if critical:
u_sp_plus = sqrtwo*log_sgi_sp2/3./log_sigma
u_sp_plus = sqrtwo * log_sgi_sp2 / 3.0 / log_sigma
erf_u_sp_plus = _erfp(u_sp_plus - log_factor)
integ1[i] = 0.
I_extra_term = Ns[i]*d_eq*np.exp((9./8.)*log_sigma*log_sigma)* \
(1.0 - erf_u_sp_plus)*((beta2*alpha*V)**0.5) # 'inertially limited' particles
integ1[i] = 0.0
I_extra_term = (
Ns[i]
* d_eq
* np.exp((9.0 / 8.0) * log_sigma * log_sigma)
* (1.0 - erf_u_sp_plus)
* ((beta2 * alpha * V) ** 0.5)
) # 'inertially limited' particles
else:
g_i = np.exp((9./2.)*log_sigma*log_sigma)
log_sgi_sp1 = np.log(sgis[i]/ssplt1) # ln(sg_i/sp1)
g_i = np.exp((9.0 / 2.0) * log_sigma * log_sigma)
log_sgi_sp1 = np.log(sgis[i] / ssplt1) # ln(sg_i/sp1)
int1_partial2 = Ns[i]*smax # building I1(0, sp2), eq (B4)
int1_partial2 *= ((1. - _erfp(u_sp2)) - 0.5*((sgis[i]/smax)**2.)*g_i* \
(1. - _erfp(u_sp2 + 3.*log_sigma/sqrtwo)))
int1_partial2 = Ns[i] * smax # building I1(0, sp2), eq (B4)
int1_partial2 *= (1.0 - _erfp(u_sp2)) - 0.5 * (
(sgis[i] / smax) ** 2.0
) * g_i * (1.0 - _erfp(u_sp2 + 3.0 * log_sigma / sqrtwo))
u_sp1 = 2.*log_sgi_sp1/(3.*sqrtwo*log_sigma)
int1_partial1 = Ns[i]*smax # building I1(0, sp1), eq (B4)
int1_partial1 *= ((1. - _erfp(u_sp1)) - 0.5*((sgis[i]/smax)**2.)*g_i*\
(1. - _erfp(u_sp1 + 3.*log_sigma/sqrtwo)))
u_sp1 = 2.0 * log_sgi_sp1 / (3.0 * sqrtwo * log_sigma)
int1_partial1 = Ns[i] * smax # building I1(0, sp1), eq (B4)
int1_partial1 *= (1.0 - _erfp(u_sp1)) - 0.5 * (
(sgis[i] / smax) ** 2.0
) * g_i * (1.0 - _erfp(u_sp1 + 3.0 * log_sigma / sqrtwo))
integ1[i] = int1_partial2 - int1_partial1 # I1(sp1, sp2)
integ1[i] = int1_partial2 - int1_partial1 # I1(sp1, sp2)
u_sp1_inertial = sqrtwo*log_sgi_sp1/3./log_sigma
u_sp1_inertial = sqrtwo * log_sgi_sp1 / 3.0 / log_sigma
erf_u_sp1 = _erfp(u_sp1_inertial - log_factor)
I_extra_term = Ns[i]*d_eq*np.exp((9./8.)*log_sigma*log_sigma)* \
(1.0 - erf_u_sp1)*((beta2*alpha*V)**0.5) # 'inertially limited' particles
I_extra_term = (
Ns[i]
* d_eq
* np.exp((9.0 / 8.0) * log_sigma * log_sigma)
* (1.0 - erf_u_sp1)
* ((beta2 * alpha * V) ** 0.5)
) # 'inertially limited' particles

@@ -472,11 +518,11 @@ # Compute total integral values

# Initial calculation
x1 = xmin # min cloud super sat -> 0.
x1 = xmin # min cloud super sat -> 0.
integ1, integ2 = _sintegral(x1)
# Note that we ignore the contribution from the FHH integral term in this
# implementation
y1 = (integ1*cf1 + integ2*cf2)*beta*x1 - 1.0
y1 = (integ1 * cf1 + integ2 * cf2) * beta * x1 - 1.0
x2 = xmax # max cloud super sat
integ1, integ2 = _sintegral(x2)
y2 = (integ1*cf1 + integ2*cf2)*beta*x2 - 1.0
y2 = (integ1 * cf1 + integ2 * cf2) * beta * x2 - 1.0

@@ -488,7 +534,7 @@ # Iteration of bisection routine to convergence

x3 = 0.5*(x1 + x2)
x3 = 0.5 * (x1 + x2)
integ1, integ2 = _sintegral(x3)
y3 = (integ1*cf1 + integ2*cf2)*beta*x3 - 1.0
y3 = (integ1 * cf1 + integ2 * cf2) * beta * x3 - 1.0
if (y1*y3) <= 0.: # different signs
if (y1 * y3) <= 0.0: # different signs
y2 = y3

@@ -499,8 +545,9 @@ x2 = x3

x1 = x3
if np.abs(x2 - x1 <= tol*x1): break
if np.abs(x2 - x1 <= tol * x1):
break
# Finalize bisection with one more intersection
x3 = 0.5*(x1 + x2)
x3 = 0.5 * (x1 + x2)
integ1, integ2 = _sintegral(x3)
y3 = (integ1*cf1 + integ2*cf2)*beta*x3 - 1.0
y3 = (integ1 * cf1 + integ2 * cf2) * beta * x3 - 1.0

@@ -511,3 +558,5 @@ smax = x3

for mu, sigma, N, kappa, sgi in zip(mus, sigmas, Ns, kappas, sgis):
N_act, act_frac = lognormal_activation(smax, mu*1e-6, sigma, N*1e-6, kappa, sgi)
N_act, act_frac = lognormal_activation(
smax, mu * 1e-6, sigma, N * 1e-6, kappa, sgi
)
n_acts.append(N_act)

@@ -519,5 +568,15 @@ act_fracs.append(act_frac)

def arg2000(V, T, P, aerosols=[], accom=c.ac,
mus=[], sigmas=[], Ns=[], kappas=[], min_smax=False):
""" Computes droplet activation using a psuedo-analytical scheme.
def arg2000(
V,
T,
P,
aerosols=[],
accom=c.ac,
mus=[],
sigmas=[],
Ns=[],
kappas=[],
min_smax=False,
):
"""Computes droplet activation using a psuedo-analytical scheme.

@@ -576,6 +635,6 @@ This method implements the psuedo-analytical scheme of [ARG2000] to

d = _unpack_aerosols(aerosols)
mus = d['mus']
sigmas = d['sigmas']
kappas = d['kappas']
Ns = d['Ns']
mus = d["mus"]
sigmas = d["sigmas"]
kappas = d["kappas"]
Ns = d["Ns"]
else:

@@ -589,23 +648,21 @@ # Assert that the aerosol was already decomposed into component vars

# Originally from Abdul-Razzak 1998 w/ Ma. Need kappa formulation
wv_sat = es(T-273.15)
alpha = (c.g*c.Mw*c.L)/(c.Cp*c.R*(T**2)) - (c.g*c.Ma)/(c.R*T)
gamma = (c.R*T)/(wv_sat*c.Mw) + (c.Mw*(c.L**2))/(c.Cp*c.Ma*T*P)
wv_sat = es(T - 273.15)
alpha = (c.g * c.Mw * c.L) / (c.Cp * c.R * (T**2)) - (c.g * c.Ma) / (c.R * T)
gamma = (c.R * T) / (wv_sat * c.Mw) + (c.Mw * (c.L**2)) / (c.Cp * c.Ma * T * P)
# Condensation effects - base calculation
G_a = (c.rho_w*c.R*T)/(wv_sat*dv_cont(T, P)*c.Mw)
G_b = (c.L*c.rho_w*((c.L*c.Mw/(c.R*T))-1))/(ka_cont(T)*T)
G_0 = 1./(G_a + G_b) # reference, no kinetic effects
G_a = (c.rho_w * c.R * T) / (wv_sat * dv_cont(T, P) * c.Mw)
G_b = (c.L * c.rho_w * ((c.L * c.Mw / (c.R * T)) - 1)) / (ka_cont(T) * T)
G_0 = 1.0 / (G_a + G_b) # reference, no kinetic effects
Smis = []
Sparts = []
for (mu, sigma, N, kappa) in zip(mus, sigmas, Ns, kappas):
for mu, sigma, N, kappa in zip(mus, sigmas, Ns, kappas):
am = mu * 1e-6
N = N * 1e6
am = mu*1e-6
N = N*1e6
fi = 0.5 * np.exp(2.5 * (np.log(sigma) ** 2))
gi = 1.0 + 0.25 * np.log(sigma)
fi = 0.5*np.exp(2.5*(np.log(sigma)**2))
gi = 1.0 + 0.25*np.log(sigma)
A = (2.*sigma_w(T)*c.Mw)/(c.rho_w*c.R*T)
A = (2.0 * sigma_w(T) * c.Mw) / (c.rho_w * c.R * T)
rc_mode, Smi2 = kohler_crit(T, am, kappa, approx=True)

@@ -620,22 +677,22 @@

# and new value for condensation coefficient
G_a = (c.rho_w*c.R*T)/(wv_sat*dv(T, rc_mode, P, accom)*c.Mw)
G_b = (c.L*c.rho_w*((c.L*c.Mw/(c.R*T))-1))/(ka_cont(T)*T)
G_ac = 1./(G_a + G_b)
G_a = (c.rho_w * c.R * T) / (wv_sat * dv(T, rc_mode, P, accom) * c.Mw)
G_b = (c.L * c.rho_w * ((c.L * c.Mw / (c.R * T)) - 1)) / (ka_cont(T) * T)
G_ac = 1.0 / (G_a + G_b)
# G_ac1 - estimate using critical radius of number mode radius,
# unity condensation coefficient; re_use G_b (no change)
G_a = (c.rho_w*c.R*T)/(wv_sat*dv(T, rc_mode, P, accom=1.0)*c.Mw)
G_ac1 = 1./(G_a + G_b)
G_a = (c.rho_w * c.R * T) / (wv_sat * dv(T, rc_mode, P, accom=1.0) * c.Mw)
G_ac1 = 1.0 / (G_a + G_b)
# Combine using scaling formula (40) from [GHAN2011]
G = G_0*G_ac/G_ac1
G = G_0 * G_ac / G_ac1
# Parameterization integral solutions
zeta = (2./3.)*A*(np.sqrt(alpha*V/G))
etai = ((alpha*V/G)**(3./2.))/(N*gamma*c.rho_w*2.*np.pi)
zeta = (2.0 / 3.0) * A * (np.sqrt(alpha * V / G))
etai = ((alpha * V / G) ** (3.0 / 2.0)) / (N * gamma * c.rho_w * 2.0 * np.pi)
# Contributions to maximum supersaturation
Spa = fi*((zeta/etai)**(1.5))
Spb = gi*(((Smi2**2)/(etai + 3.*zeta))**(0.75))
S_part = (1./(Smi2**2))*(Spa + Spb)
Spa = fi * ((zeta / etai) ** (1.5))
Spb = gi * (((Smi2**2) / (etai + 3.0 * zeta)) ** (0.75))
S_part = (1.0 / (Smi2**2)) * (Spa + Spb)

@@ -648,11 +705,11 @@ Smis.append(Smi2)

for i in range(len(mus)):
mode_smax = 1./np.sqrt(Sparts[i])
mode_smax = 1.0 / np.sqrt(Sparts[i])
if mode_smax < smax:
smax = mode_smax
else: # Use default competition parameterization
smax = 1./np.sqrt(np.sum(Sparts))
else: # Use default competition parameterization
smax = 1.0 / np.sqrt(np.sum(Sparts))
n_acts, act_fracs = [], []
for mu, sigma, N, kappa, sgi in zip(mus, sigmas, Ns, kappas, Smis):
N_act, act_frac = lognormal_activation(smax, mu*1e-6, sigma, N, kappa, sgi)
N_act, act_frac = lognormal_activation(smax, mu * 1e-6, sigma, N, kappa, sgi)
n_acts.append(N_act)

@@ -665,3 +722,3 @@ act_fracs.append(act_frac)

def shipwayabel2010(V, T, P, aerosol):
""" Activation scheme following Shipway and Abel, 2010
"""Activation scheme following Shipway and Abel, 2010
(doi:10.1016/j.atmosres.2009.10.005).

@@ -668,0 +725,0 @@

""" Container class for encapsulating data about aerosol size distributions.
"""
from builtins import zip
from builtins import object
import numpy as np
from . distributions import BaseDistribution, Lognorm, MultiModeLognorm
from .distributions import BaseDistribution, Lognorm, MultiModeLognorm
def dist_to_conc(dist, r_min, r_max, rule="trapezoid"):
""" Converts a swath of a size distribution function to an actual number
"""Converts a swath of a size distribution function to an actual number
concentration.

@@ -49,11 +46,13 @@

if rule == "trapezoid":
return width*0.5*(pdf(r_max) + pdf(r_min))
return width * 0.5 * (pdf(r_max) + pdf(r_min))
elif rule == "simpson":
return (width/6.)*(pdf(r_max) + pdf(r_min) + 4.*pdf(0.5*(r_max + r_min)))
return (width / 6.0) * (
pdf(r_max) + pdf(r_min) + 4.0 * pdf(0.5 * (r_max + r_min))
)
else:
return width*pdf(0.5*(r_max + r_min))
return width * pdf(0.5 * (r_max + r_min))
class AerosolSpecies(object):
""" Container class for organizing aerosol metadata.
"""Container class for organizing aerosol metadata.

@@ -135,10 +134,19 @@ To allow flexibility with how aerosols are defined in the model, this class is

"""
def __init__(self, species, distribution, kappa,
rho=None, mw=None,
bins=None, r_min=None, r_max=None):
def __init__(
self,
species,
distribution,
kappa,
rho=None,
mw=None,
bins=None,
r_min=None,
r_max=None,
):
self.species = species # Species molecular formula
self.kappa = kappa # Kappa hygroscopicity parameter
self.rho = rho # aerosol density kg/m^3
self.kappa = kappa # Kappa hygroscopicity parameter
self.rho = rho # aerosol density kg/m^3
self.mw = mw
self.bins = bins # Number of bins for discretizing the size distribution
self.bins = bins # Number of bins for discretizing the size distribution

@@ -148,3 +156,3 @@ # Handle the size distribution passed to the constructor

if isinstance(distribution, dict):
self.r_drys = np.array(distribution['r_drys'])*1e-6
self.r_drys = np.array(distribution["r_drys"]) * 1e-6

@@ -156,8 +164,8 @@ # Compute boundaries for bins. To do this, assume the right

if len(self.r_drys) > 1:
mid1 = np.sqrt(self.r_drys[0]*self.r_drys[1])
lr = (self.r_drys[0]**2.) / mid1
rs = [lr, mid1, ]
mid1 = np.sqrt(self.r_drys[0] * self.r_drys[1])
lr = (self.r_drys[0] ** 2.0) / mid1
rs = [lr, mid1]
for r_dry in self.r_drys[1:]:
rs.append(r_dry**2. / rs[-1])
self.rs = np.array(rs)*1e6
rs.append(r_dry**2.0 / rs[-1])
self.rs = np.array(rs) * 1e6
else:

@@ -167,3 +175,3 @@ # Truly mono-disperse, so no boundaries (we don't actually need

self.rs = None
self.Nis = np.array(distribution['Nis'])
self.Nis = np.array(distribution["Nis"])
self.N = np.sum(self.Nis)

@@ -174,4 +182,6 @@

if bins is None:
raise ValueError("Need to specify `bins` argument if passing a Lognorm "
"distribution")
raise ValueError(
"Need to specify `bins` argument if passing a Lognorm "
"distribution"
)

@@ -182,22 +192,29 @@ if isinstance(bins, (list, np.ndarray)):

if not r_min:
lr = np.log10(distribution.mu/(10.*distribution.sigma))
lr = np.log10(distribution.mu / (10.0 * distribution.sigma))
else:
lr = np.log10(r_min)
if not r_max:
rr = np.log10(distribution.mu*10.*distribution.sigma)
rr = np.log10(distribution.mu * 10.0 * distribution.sigma)
else:
rr = np.log10(r_max)
self.rs = np.logspace(lr, rr, num=bins+1)[:]
self.rs = np.logspace(lr, rr, num=bins + 1)[:]
nbins = len(self.rs)
mids = np.array([np.sqrt(a*b)
for a, b in zip(self.rs[:-1], self.rs[1:])])[0:nbins]
self.Nis = np.array([0.5*(b-a)*(distribution.pdf(a) + distribution.pdf(b))
for a, b in zip(self.rs[:-1], self.rs[1:])])[0:nbins]
self.r_drys = mids*1e-6
mids = np.array(
[np.sqrt(a * b) for a, b in zip(self.rs[:-1], self.rs[1:])]
)[0:nbins]
self.Nis = np.array(
[
0.5 * (b - a) * (distribution.pdf(a) + distribution.pdf(b))
for a, b in zip(self.rs[:-1], self.rs[1:])
]
)[0:nbins]
self.r_drys = mids * 1e-6
elif isinstance(distribution, MultiModeLognorm):
if bins is None:
raise ValueError("Need to specify `bins` argument if passing a "
"MultiModeLognorm distribution")
raise ValueError(
"Need to specify `bins` argument if passing a "
"MultiModeLognorm distribution"
)

@@ -213,21 +230,27 @@ small_mu = distribution.mus[0]

if not r_min:
lr = np.log10(small_mu/(10.*small_sigma))
lr = np.log10(small_mu / (10.0 * small_sigma))
else:
lr = np.log10(r_min)
if not r_max:
rr = np.log10(big_mu*10.*big_sigma)
rr = np.log10(big_mu * 10.0 * big_sigma)
else:
rr = np.log10(r_max)
self.rs = np.logspace(lr, rr, num=bins+1)[:]
self.rs = np.logspace(lr, rr, num=bins + 1)[:]
nbins = len(self.rs)
mids = np.array([np.sqrt(a*b)
for a, b in zip(self.rs[:-1], self.rs[1:])])[0:nbins]
self.Nis = np.array([0.5*(b-a)*(distribution.pdf(a) + distribution.pdf(b))
for a, b in zip(self.rs[:-1], self.rs[1:])])[0:nbins]
self.r_drys = mids*1e-6
mids = np.array(
[np.sqrt(a * b) for a, b in zip(self.rs[:-1], self.rs[1:])]
)[0:nbins]
self.Nis = np.array(
[
0.5 * (b - a) * (distribution.pdf(a) + distribution.pdf(b))
for a, b in zip(self.rs[:-1], self.rs[1:])
]
)[0:nbins]
self.r_drys = mids * 1e-6
else:
raise ValueError("Could not work with size distribution of type %r"
% type(distribution))
raise ValueError(
"Could not work with size distribution of type %r" % type(distribution)
)

@@ -241,3 +264,3 @@ # Correct to SI units

def stats(self):
""" Compute useful statistics about this aerosol's size distribution.
"""Compute useful statistics about this aerosol's size distribution.

@@ -262,15 +285,17 @@ Returns

if self.rho:
stats_dict["total_mass"] = stats_dict["total_volume"] * self.rho
stats_dict["mean_mass"] = stats_dict["mean_volume"] * self.rho
stats_dict["specific_surface_area"] = (
stats_dict["total_surface_area"] / stats_dict["total_mass"]
)
stats_dict['total_mass'] = stats_dict['total_volume']*self.rho
stats_dict['mean_mass'] = stats_dict['mean_volume']*self.rho
stats_dict['specific_surface_area'] = \
stats_dict['total_surface_area']/stats_dict['total_mass']
return self.rho
else:
raise ValueError("Could not work with size distribution of type %r"
% type(self.distribution))
raise ValueError(
"Could not work with size distribution of type %r"
% type(self.distribution)
)
def __repr__(self):
return "%s - %r" % (self.species, self.distribution)

@@ -46,30 +46,26 @@ """ Commonly used constants in microphysics and aerosol thermodynamics equations as

import pandas as pd
from io import StringIO
import pkg_resources
from future import standard_library
standard_library.install_aliases()
g = 9.81 #: Gravitational constant, m/s^2
Cp = 1004.0 #: Specific heat of dry air at constant pressure, J/kg
L = 2.25e6 #: Latent heat of condensation, J/kg
rho_w = 1e3 #: Density of water, kg/m^3
R = 8.314 #: Universal gas constant, J/(mol K)
Mw = 18.0/1e3 #: Molecular weight of water, kg/mol
Ma = 28.9/1e3 #: Molecular weight of dry air, kg/mol
Rd = R/Ma #: Gas constant for dry air, J/(kg K)
Rv = R/Mw #: Gas constant for water vapor, J/(kg K)
Dv = 3.e-5 #: Diffusivity of water vapor in air, m^2/s
ac = 1.0 #: condensation constant
Ka = 2.e-2 #: Thermal conductivity of air, J/m/s/K
at = 0.96 #: thermal accomodation coefficient
epsilon = 0.622 #: molecular weight of water / molecular weight of dry air
g = 9.81 #: Gravitational constant, m/s^2
Cp = 1004.0 #: Specific heat of dry air at constant pressure, J/kg
L = 2.25e6 #: Latent heat of condensation, J/kg
rho_w = 1e3 #: Density of water, kg/m^3
R = 8.314 #: Universal gas constant, J/(mol K)
Mw = 18.0 / 1e3 #: Molecular weight of water, kg/mol
Ma = 28.9 / 1e3 #: Molecular weight of dry air, kg/mol
Rd = R / Ma #: Gas constant for dry air, J/(kg K)
Rv = R / Mw #: Gas constant for water vapor, J/(kg K)
Dv = 3.0e-5 #: Diffusivity of water vapor in air, m^2/s
ac = 1.0 #: condensation constant
Ka = 2.0e-2 #: Thermal conductivity of air, J/m/s/K
at = 0.96 #: thermal accomodation coefficient
epsilon = 0.622 #: molecular weight of water / molecular weight of dry air
# Additional fixed model parameters
N_STATE_VARS = 7
STATE_VARS = ['z', 'P', 'T', 'wv', 'wc', 'wi', 'S']
STATE_VARS = ["z", "P", "T", "wv", "wc", "wi", "S"]
STATE_VAR_MAP = {var: i for i, var in enumerate(STATE_VARS)}
# Read the standard atmosphere CSV file
_std_atm_fn = pkg_resources.resource_filename("pyrcel",
"data/std_atm.csv")
_std_atm_fn = pkg_resources.resource_filename("pyrcel", "data/std_atm.csv")
std_atm = pd.read_csv(_std_atm_fn, delim_whitespace=True)

@@ -9,4 +9,2 @@ """ Collection of classes for representing aerosol size distributions.

"""
from builtins import zip
from builtins import object
from abc import ABCMeta, abstractmethod

@@ -16,26 +14,28 @@

from scipy.special import erf, erfinv
from future.utils import with_metaclass
class BaseDistribution(with_metaclass(ABCMeta, object)):
""" Interface for distributions, to ensure that they contain a pdf method.
"""
class BaseDistribution(metaclass=ABCMeta):
"""Interface for distributions, to ensure that they contain a pdf method."""
@abstractmethod
def cdf(self, x):
""" Cumulative density function """
"""Cumulative density function"""
@abstractmethod
def pdf(self, x):
""" Probability density function. """
"""Probability density function."""
@property
@abstractmethod
def stats(self):
pass
@abstractmethod
def __repr__(self):
""" Representation function. """
"""Representation function."""
class Gamma(BaseDistribution):
""" Gamma size distribution
"""Gamma size distribution"""
"""
# TODO: Implement Gamma size distribution

@@ -46,3 +46,3 @@ pass

class Lognorm(BaseDistribution):
""" Lognormal size distribution.
"""Lognormal size distribution.

@@ -91,14 +91,14 @@ An instance of :class:`Lognorm` contains a construction of a lognormal distribution

self.log = np.log
elif self.base == 10.:
elif self.base == 10.0:
self.log = np.log10
else:
self.log_base = np.log(self.base)
self.log = lambda x: np.log(x)/self.log_base
self.log = lambda x: np.log(x) / self.log_base
# Compute moments
self.median = self.mu
self.mean = self.mu*np.exp(0.5*self.sigma**2)
self.mean = self.mu * np.exp(0.5 * self.sigma**2)
def invcdf(self, y):
""" Inverse of cumulative density function.
"""Inverse of cumulative density function.

@@ -119,7 +119,7 @@ Parameters

erfinv_arg = 2.*y/self.N - 1.
return self.mu*np.exp(np.log(self.sigma) * np.sqrt(2.) * erfinv(erfinv_arg))
erfinv_arg = 2.0 * y / self.N - 1.0
return self.mu * np.exp(np.log(self.sigma) * np.sqrt(2.0) * erfinv(erfinv_arg))
def cdf(self, x):
""" Cumulative density function
"""Cumulative density function

@@ -139,7 +139,7 @@ .. math::

"""
erf_arg = (self.log(x/self.mu))/(np.sqrt(2.0)*self.log(self.sigma))
return (self.N/2.0)*(1.0+erf(erf_arg))
erf_arg = (self.log(x / self.mu)) / (np.sqrt(2.0) * self.log(self.sigma))
return (self.N / 2.0) * (1.0 + erf(erf_arg))
def pdf(self, x):
""" Probability density function
"""Probability density function

@@ -159,8 +159,8 @@ .. math::

"""
scaling = self.N/(np.sqrt(2.0*np.pi)*self.log(self.sigma))
exponent = ((self.log(x/self.mu))**2)/(2.0*(self.log(self.sigma))**2)
return (scaling/x)*np.exp(-exponent)
scaling = self.N / (np.sqrt(2.0 * np.pi) * self.log(self.sigma))
exponent = ((self.log(x / self.mu)) ** 2) / (2.0 * (self.log(self.sigma)) ** 2)
return (scaling / x) * np.exp(-exponent)
def moment(self, k):
""" Compute the k-th moment of the lognormal distribution
"""Compute the k-th moment of the lognormal distribution

@@ -180,9 +180,8 @@ .. math::

"""
scaling = (self.mu**k)*self.N
exponent = (((k**2)/2.)*(self.log(self.sigma))**2)
return scaling*np.exp(exponent)
scaling = (self.mu**k) * self.N
exponent = ((k**2) / 2.0) * (self.log(self.sigma)) ** 2
return scaling * np.exp(exponent)
@property
def stats(self):
""" Compute useful statistics for a lognormal distribution
"""Compute useful statistics for a lognormal distribution

@@ -198,13 +197,14 @@ Returns

stats_dict = dict()
stats_dict['mean_radius'] = self.mu*np.exp(0.5*self.sigma**2)
stats_dict["mean_radius"] = self.mu * np.exp(0.5 * self.sigma**2)
stats_dict['total_diameter'] = self.N*stats_dict['mean_radius']
stats_dict['total_surface_area'] = 4.*np.pi*self.moment(2.0)
stats_dict['total_volume'] = (4.*np.pi/3.)*self.moment(3.0)
stats_dict["total_diameter"] = self.N * stats_dict["mean_radius"]
stats_dict["total_surface_area"] = 4.0 * np.pi * self.moment(2.0)
stats_dict["total_volume"] = (4.0 * np.pi / 3.0) * self.moment(3.0)
stats_dict['mean_surface_area'] = stats_dict['total_surface_area']/self.N
stats_dict['mean_volume'] = stats_dict['total_volume']/self.N
stats_dict["mean_surface_area"] = stats_dict["total_surface_area"] / self.N
stats_dict["mean_volume"] = stats_dict["total_volume"] / self.N
stats_dict['effective_radius'] = \
stats_dict['total_volume']/stats_dict['total_surface_area']
stats_dict["effective_radius"] = (
stats_dict["total_volume"] / stats_dict["total_surface_area"]
)

@@ -220,3 +220,3 @@ return stats_dict

class MultiModeLognorm(BaseDistribution):
""" Multimode lognormal distribution class.
"""Multimode lognormal distribution class.

@@ -228,5 +228,5 @@ Container for multiple Lognorm classes representing a full aerosol size

def __init__(self, mus, sigmas, Ns, base=np.e):
dist_params = list(zip(mus, sigmas, Ns))
from operator import itemgetter
dist_params = sorted(dist_params, key=itemgetter(0))

@@ -249,6 +249,13 @@

def stats(self):
"""Compute useful statistics for a multi-mode lognormal distribution
TODO: Implement multi-mode lognorm stats
"""
raise NotImplementedError()
def __repr__(self):
mus_str = "("+", ".join("%2.2e" % mu for mu in self.mus)+")"
sigmas_str = "("+", ".join("%2.2e" % sigma for sigma in self.sigmas)+")"
Ns_str = "("+", ".join("%2.2e" % N for N in self.Ns)+")"
mus_str = "(" + ", ".join("%2.2e" % mu for mu in self.mus) + ")"
sigmas_str = "(" + ", ".join("%2.2e" % sigma for sigma in self.sigmas) + ")"
Ns_str = "(" + ", ".join("%2.2e" % N for N in self.Ns) + ")"
return "MultiModeLognorm| mus = {}, sigmas = {}, Totals = {} |".format(

@@ -258,2 +265,3 @@ mus_str, sigmas_str, Ns_str

#: Single mode aerosols

@@ -263,15 +271,15 @@ # TODO: Re-factor saved size distributions into a resource like 'constants'

FN2005_single_modes = {
'SM1': Lognorm(0.025/2, 1.3, 100.0),
'SM2': Lognorm(0.025/2, 1.3, 500.0),
'SM3': Lognorm(0.05/2, 1.8, 500.0),
'SM4': Lognorm(0.25/2, 1.8, 100.0),
'SM5': Lognorm(0.75/2, 1.8, 1000.0),
"SM1": Lognorm(0.025 / 2, 1.3, 100.0),
"SM2": Lognorm(0.025 / 2, 1.3, 500.0),
"SM3": Lognorm(0.05 / 2, 1.8, 500.0),
"SM4": Lognorm(0.25 / 2, 1.8, 100.0),
"SM5": Lognorm(0.75 / 2, 1.8, 1000.0),
}
NS2003_single_modes = {
'SM1': Lognorm(0.02/2, 2.5, 200.0),
'SM2': Lognorm(0.02/2, 2.5, 1000.0),
'SM3': Lognorm(0.02/2, 1.5, 1000.0),
'SM4': Lognorm(0.2/2, 2.5, 200.0),
'SM5': Lognorm(0.02/2, 2.5, 10000.0),
"SM1": Lognorm(0.02 / 2, 2.5, 200.0),
"SM2": Lognorm(0.02 / 2, 2.5, 1000.0),
"SM3": Lognorm(0.02 / 2, 1.5, 1000.0),
"SM4": Lognorm(0.2 / 2, 2.5, 200.0),
"SM5": Lognorm(0.02 / 2, 2.5, 10000.0),
}

@@ -282,10 +290,22 @@

# mu = micron, N = cm**-3
'marine': [Lognorm(0.01/2., 1.6, 340.), Lognorm(0.07/2., 2., 6.0),
Lognorm(0.62/2., 2.7, 3.1)],
'continental': [Lognorm(0.016/2., 1.6, 1000.), Lognorm(0.068/2., 2.1, 800.),
Lognorm(0.92/2., 2.2, 0.72)],
'background': [Lognorm(0.01/2., 1.7, 6400.), Lognorm(0.076/2., 2., 2300.0),
Lognorm(1.02/2., 2.16, 3.2)],
'urban': [Lognorm(0.014/2., 1.8, 10600.), Lognorm(0.054/2., 2.16, 32000.0),
Lognorm(0.86/2., 2.21, 5.4)]
"marine": [
Lognorm(0.01 / 2.0, 1.6, 340.0),
Lognorm(0.07 / 2.0, 2.0, 6.0),
Lognorm(0.62 / 2.0, 2.7, 3.1),
],
"continental": [
Lognorm(0.016 / 2.0, 1.6, 1000.0),
Lognorm(0.068 / 2.0, 2.1, 800.0),
Lognorm(0.92 / 2.0, 2.2, 0.72),
],
"background": [
Lognorm(0.01 / 2.0, 1.7, 6400.0),
Lognorm(0.076 / 2.0, 2.0, 2300.0),
Lognorm(1.02 / 2.0, 2.16, 3.2),
],
"urban": [
Lognorm(0.014 / 2.0, 1.8, 10600.0),
Lognorm(0.054 / 2.0, 2.16, 32000.0),
Lognorm(0.86 / 2.0, 2.21, 5.4),
],
}

@@ -295,20 +315,38 @@

jaenicke_distributions = {
"Polar": MultiModeLognorm(mus=(0.0689, 0.375, 4.29),
sigmas=(10**0.245, 10**0.300, 10**0.291),
Ns=(21.7, 0.186, 3.04e-4), base=10.),
"Urban": MultiModeLognorm(mus=(0.00651, 0.00714, 0.0248),
sigmas=(10.**0.245, 10.**0.666, 10.**0.337),
Ns=(9.93e4, 1.11e3, 3.64e4), base=10.),
"Background": MultiModeLognorm(mus=(0.0036, 0.127, 0.259),
sigmas=(10.**0.645, 10.**0.253, 10.**0.425),
Ns=(129., 59.7, 63.5), base=10.),
"Maritime": MultiModeLognorm(mus=(0.0039, 0.133, 0.29),
sigmas=(10.**0.657, 10.**0.210, 10.**0.396),
Ns=(133., 66.6, 3.06), base=10.),
"Remote Continental": MultiModeLognorm(mus=(0.01, 0.058, 0.9),
sigmas=(10.**0.161, 10.**0.217, 10.**0.38),
Ns=(3.2e3, 2.9e3, 0.3), base=10.),
"Rural": MultiModeLognorm(mus=(0.00739, 0.0269, 0.0149),
sigmas=(10.**0.225, 10.**0.557, 10.**0.266),
Ns=(6.65e3, 147., 1990.), base=10.),
"Polar": MultiModeLognorm(
mus=(0.0689, 0.375, 4.29),
sigmas=(10**0.245, 10**0.300, 10**0.291),
Ns=(21.7, 0.186, 3.04e-4),
base=10.0,
),
"Urban": MultiModeLognorm(
mus=(0.00651, 0.00714, 0.0248),
sigmas=(10.0**0.245, 10.0**0.666, 10.0**0.337),
Ns=(9.93e4, 1.11e3, 3.64e4),
base=10.0,
),
"Background": MultiModeLognorm(
mus=(0.0036, 0.127, 0.259),
sigmas=(10.0**0.645, 10.0**0.253, 10.0**0.425),
Ns=(129.0, 59.7, 63.5),
base=10.0,
),
"Maritime": MultiModeLognorm(
mus=(0.0039, 0.133, 0.29),
sigmas=(10.0**0.657, 10.0**0.210, 10.0**0.396),
Ns=(133.0, 66.6, 3.06),
base=10.0,
),
"Remote Continental": MultiModeLognorm(
mus=(0.01, 0.058, 0.9),
sigmas=(10.0**0.161, 10.0**0.217, 10.0**0.38),
Ns=(3.2e3, 2.9e3, 0.3),
base=10.0,
),
"Rural": MultiModeLognorm(
mus=(0.00739, 0.0269, 0.0149),
sigmas=(10.0**0.225, 10.0**0.557, 10.0**0.266),
Ns=(6.65e3, 147.0, 1990.0),
base=10.0,
),
}

@@ -13,18 +13,28 @@ """ Utilities for driving sets of parcel model integration strategies.

"""
from __future__ import print_function
from __future__ import absolute_import
from builtins import range
from numpy import empty, nan
from pandas import DataFrame
from .parcel import ParcelModel, ParcelModelError
from .activation import mbn2014, arg2000
from pyrcel.util import ParcelModelError
from .activation import arg2000, mbn2014
from .parcel import ParcelModel
def run_model(V, initial_aerosols, T, P, dt, S0=-0.0, max_steps=1000,
t_end=500., solver='lsoda', output_fmt='smax', terminate=False,
solver_kws=None, model_kws=None):
""" Setup and run the parcel model with given solver configuration.
def run_model(
V,
initial_aerosols,
T,
P,
dt,
S0=-0.0,
max_steps=1000,
t_end=500.0,
solver="lsoda",
output_fmt="smax",
terminate=False,
solver_kws=None,
model_kws=None,
):
"""Setup and run the parcel model with given solver configuration.
Parameters

@@ -72,8 +82,15 @@ ----------

if V <= 0:
return 0.
return 0.0
try:
model = ParcelModel(initial_aerosols, V, T, S0, P, **model_kws)
Smax = model.run(t_end, dt, max_steps, solver=solver,
output_fmt=output_fmt, terminate=terminate, **solver_kws)
Smax = model.run(
t_end,
dt,
max_steps,
solver=solver,
output_fmt=output_fmt,
terminate=terminate,
**solver_kws
)
except ParcelModelError:

@@ -84,6 +101,16 @@ return None

def iterate_runs(V, initial_aerosols, T, P, S0=-0.0, dt=0.01, dt_iters=2,
t_end=500., max_steps=500, output_fmt='smax',
fail_easy=True):
""" Iterate through several different strategies for integrating the parcel model.
def iterate_runs(
V,
initial_aerosols,
T,
P,
S0=-0.0,
dt=0.01,
dt_iters=2,
t_end=500.0,
max_steps=500,
output_fmt="smax",
fail_easy=True,
):
"""Iterate through several different strategies for integrating the parcel model.

@@ -133,3 +160,3 @@ As long as `fail_easy` is set to `False`, the strategies this method implements are:

if V <= 0:
return 0., 0., 0.
return 0.0, 0.0, 0.0

@@ -140,3 +167,3 @@ # Check that there are actually aerosols to deal with

if aerosol_N[0] < 0.01:
return -9999., -9999., -9999.
return -9999.0, -9999.0, -9999.0
else:

@@ -152,3 +179,3 @@ new_aerosols = []

dt_orig = dt*1.
dt_orig = dt * 1.0
finished = False

@@ -159,15 +186,37 @@ S_max = None

print(" Trying CVODE with default tolerance")
S_max = run_model(V, aerosols, T, P, dt, S0=S0, max_steps=2000, solver='cvode',
t_end=t_end, output_fmt=output_fmt,
solver_kws={'iter': 'Newton', 'time_limit': 10.0,
'linear_solver': "DENSE"})
S_max = run_model(
V,
aerosols,
T,
P,
dt,
S0=S0,
max_steps=2000,
solver="cvode",
t_end=t_end,
output_fmt=output_fmt,
solver_kws={
"iter": "Newton",
"time_limit": 10.0,
"linear_solver": "DENSE",
},
)
# Strategy 2: Iterate over some increasingly relaxed tolerances for LSODA.
if (S_max is None) and not fail_easy:
while dt > dt_orig/(2**dt_iters):
while dt > dt_orig / (2**dt_iters):
print(" Trying LSODA, dt = %1.3e, max_steps = %d" % (dt, max_steps))
S_max = run_model(V, aerosols, T, P, dt, S0, max_steps, solver='lsoda',
t_end=t_end)
S_max = run_model(
V,
aerosols,
T,
P,
dt,
S0,
max_steps,
solver="lsoda",
t_end=t_end,
)
if not S_max:
dt /= 2.
dt /= 2.0
print(" Retrying...")

@@ -182,13 +231,25 @@ else:

print(" Trying LSODE")
S_max = run_model(V, aerosols, T, P, dt_orig, max_steps=1000, solver='lsode',
t_end=t_end, S0=S0)
S_max = run_model(
V,
aerosols,
T,
P,
dt_orig,
max_steps=1000,
solver="lsode",
t_end=t_end,
S0=S0,
)
# Strategy 4: If all else fails return -9999.
if (S_max is None):
if S_max is None:
if output_fmt == "smax":
S_max = -9999.
S_max = -9999.0
elif output_fmt == "arrays":
S_max = empty([0]), empty([0])
elif output_fmt == "dataframes":
S_max = DataFrame(data={"S": [nan]}), DataFrame(data={"aerosol1": [nan]})
S_max = (
DataFrame(data={"S": [nan]}),
DataFrame(data={"aerosol1": [nan]}),
)
else:

@@ -199,1 +260,3 @@ S_max = nan

return S_max, S_max_arg, S_max_fn
return S_max, S_max_arg, S_max_fn
return S_max, S_max_arg, S_max_fn
# -*- coding: utf-8 -*-
""" Interface to numerical ODE solvers.
"""
from __future__ import print_function
from __future__ import absolute_import
from builtins import object
from future.utils import with_metaclass
from abc import ABCMeta, abstractmethod
import time
import numpy as np
import warnings
import sys
import sys
# Compatibility - timer functions

@@ -18,2 +10,7 @@ # In Python 3, the more accurate `time.process_time()` method is available. But

import time
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
if sys.version_info[0] < 3:

@@ -26,25 +23,28 @@ timer = time.clock

available_integrators = ['odeint']
available_integrators = ["odeint"]
try:
from odespy.odepack import Lsode, Lsoda
from odespy import Vode
available_integrators.extend(['lsode', 'lsoda', 'vode'])
from odespy.odepack import Lsoda, Lsode
available_integrators.extend(["lsode", "lsoda", "vode"])
except ImportError:
warnings.warn("Could not import odespy package; "
"invoking the 'lsoda' or 'lsode' options will fail!")
warnings.warn(
"Could not import odespy package; "
"invoking the 'lsoda' or 'lsode' options will fail!"
)
try:
# from assimulo.solvers.odepack import LSODAR
from assimulo.exception import TimeLimitExceeded
from assimulo.problem import Explicit_Problem
from assimulo.solvers.sundials import CVode, CVodeError
# from assimulo.solvers.odepack import LSODAR
from assimulo.exception import TimeLimitExceeded
available_integrators.extend(['cvode', 'lsodar'])
available_integrators.extend(["cvode", "lsodar"])
except ImportError:
warnings.warn("Could not import Assimulo; "
"invoking the CVode solver will fail!")
warnings.warn("Could not import Assimulo; " "invoking the CVode solver will fail!")
__all__ = ['Integrator', ]
__all__ = ["Integrator"]

@@ -55,3 +55,3 @@ state_atol = [1e-4, 1e-4, 1e-4, 1e-10, 1e-10, 1e-4, 1e-8]

class Integrator(with_metaclass(ABCMeta, object)):
class Integrator(metaclass=ABCMeta):
"""

@@ -66,4 +66,3 @@ Container class for the various integrators to use in the parcel model.

def __init__(self, rhs, output_dt, solver_dt, y0, args, t0=0., console=False):
def __init__(self, rhs, output_dt, solver_dt, y0, args, t0=0.0, console=False):
self.output_dt = output_dt

@@ -93,4 +92,3 @@ self.solver_dt = solver_dt

def solver(method):
""" Maps a solver name to a function.
"""
"""Maps a solver name to a function."""
solvers = {

@@ -106,4 +104,3 @@ # # SciPy interfaces

# 'lsodar': partial(Integrator._solve_with_assimulo, method='lsodar'),
'cvode': CVODEIntegrator,
"cvode": CVODEIntegrator
}

@@ -120,12 +117,9 @@

class ExtendedProblem(Explicit_Problem):
""" This extension of the Assimulo 'Explicit_Problem' class
"""This extension of the Assimulo 'Explicit_Problem' class
encodes some of the logic particular to the parcel model simulation,
specifically rules for terminating the simulation and detecting
events such as the maximum supersaturation occurring """
events such as the maximum supersaturation occurring"""
name = 'Parcel model ODEs'
sw0 = [
True, # Normal integration switch
False, # Past cut-off switch
]
name = "Parcel model ODEs"
sw0 = [True, False] # Normal integration switch # Past cut-off switch
t_cutoff = 1e5

@@ -138,3 +132,3 @@ dS_dt = 1.0

self.V = rhs_args[3]
self.terminate_time = terminate_depth/self.V
self.terminate_time = terminate_depth / self.V
super(Explicit_Problem, self).__init__(*args, **kwargs)

@@ -152,3 +146,3 @@

# with the correct one for now, but leave a record of this change
# Daniel Rothenberg <darothen@mit.edu> - 2/15/2016
# Daniel Rothenberg <daniel@danielrothenberg.com> - 2/15/2016
# dode_dt = np.zeros(c.N_STATE_VARS + self.args[0]) # FROM INIT ARGS

@@ -160,4 +154,4 @@ dode_dt = np.zeros(c.N_STATE_VARS + self.rhs_args[0])

def state_events(self, t, y, sw):
""" Check whether an 'event' has occurred. We want to see if the
supersaturation is decreasing or not. """
"""Check whether an 'event' has occurred. We want to see if the
supersaturation is decreasing or not."""
if sw[0]:

@@ -174,4 +168,4 @@ smax_event = self.dS_dt

def handle_event(self, solver, event_info):
""" Event handling. This function is called when Assimulo finds
an event as specified by the event function. """
"""Event handling. This function is called when Assimulo finds
an event as specified by the event function."""
event_info = event_info[0] # Only state events, event_info[1] is time events

@@ -188,7 +182,17 @@ if event_info[0] != 0:

class CVODEIntegrator(Integrator):
kwargs = None # Save the kwargs used for setting up the interface to CVODE!
def __init__(self, rhs, output_dt, solver_dt, y0, args, t0=0., console=False,
terminate=False, terminate_depth=100., **kwargs):
def __init__(
self,
rhs,
output_dt,
solver_dt,
y0,
args,
t0=0.0,
console=False,
terminate=False,
terminate_depth=100.0,
**kwargs
):
self.terminate = terminate

@@ -201,4 +205,5 @@ super(CVODEIntegrator, self).__init__(

if terminate:
self.prob = ExtendedProblem(self.rhs, self.args, terminate_depth,
y0=self.y0)
self.prob = ExtendedProblem(
self.rhs, self.args, terminate_depth, y0=self.y0
)
else:

@@ -212,3 +217,3 @@ self.prob = Explicit_Problem(self.rhs, self.y0)

def _setup_sim(self, **kwargs):
""" Create a simulation interface to Assimulo using CVODE, given
"""Create a simulation interface to Assimulo using CVODE, given
a problem definition

@@ -220,3 +225,3 @@

sim = CVode(self.prob)
sim.discr = 'BDF'
sim.discr = "BDF"
sim.maxord = 5

@@ -227,21 +232,21 @@

# the number of anticipated tuning knobs is small.
if 'maxh' in kwargs:
sim.maxh = kwargs['maxh']
if "maxh" in kwargs:
sim.maxh = kwargs["maxh"]
else:
sim.maxh = np.min([0.1, self.output_dt])
if 'minh' in kwargs:
sim.minh = kwargs['minh']
if "minh" in kwargs:
sim.minh = kwargs["minh"]
# else: sim.minh = 0.001
if "iter" in kwargs:
sim.iter = kwargs['iter']
sim.iter = kwargs["iter"]
else:
sim.iter = 'Newton'
sim.iter = "Newton"
if "linear_solver" in kwargs:
sim.linear_solver = kwargs['linear_solver']
sim.linear_solver = kwargs["linear_solver"]
if "max_steps" in kwargs: # DIFFERENT NAME!!!!
sim.maxsteps = kwargs['max_steps']
sim.maxsteps = kwargs["max_steps"]
else:

@@ -251,3 +256,3 @@ sim.maxsteps = 1000

if "time_limit" in kwargs:
sim.time_limit = kwargs['time_limit']
sim.time_limit = kwargs["time_limit"]
sim.report_continuously = True

@@ -263,3 +268,3 @@ else:

sim.rtol = state_rtol
sim.atol = state_atol + [1e-12]*nr
sim.atol = state_atol + [1e-12] * nr

@@ -276,3 +281,2 @@ if not self.console:

def integrate(self, t_end, **kwargs):
# Compute integration logic. We need to know:

@@ -282,3 +286,3 @@ # 1) How are we iterating the solver loop?

# 2) How many points do we want to interpolate for output?
n_out = int(self.solver_dt/self.output_dt)
n_out = int(self.solver_dt / self.output_dt)
t_current = self.t0

@@ -291,10 +295,10 @@

print(" step time walltime Δwalltime | z T S")
print(" "
"------------------------------------|----------------------")
step_fmt = " {:5d} {:7.2f}s {:7.2f}s {:8.2f}s |" \
" {:5.1f} {:7.2f} {:6.2f}%"
print(" " "------------------------------------|----------------------")
step_fmt = (
" {:5d} {:7.2f}s {:7.2f}s {:8.2f}s |" " {:5.1f} {:7.2f} {:6.2f}%"
)
txs, xxs = [], []
n_steps = 1
total_walltime = 0.
total_walltime = 0.0
now = timer()

@@ -309,11 +313,19 @@ while t_current < t_end:

state = self.y0 if n_steps == 1 else xxs[-1][-1]
_z = state[c.STATE_VAR_MAP['z']]
_T = state[c.STATE_VAR_MAP['T']]
_S = state[c.STATE_VAR_MAP['S']]*100
print(step_fmt.format(n_steps, t_current, total_walltime,
delta_walltime, _z, _T, _S))
_z = state[c.STATE_VAR_MAP["z"]]
_T = state[c.STATE_VAR_MAP["T"]]
_S = state[c.STATE_VAR_MAP["S"]] * 100
print(
step_fmt.format(
n_steps,
t_current,
total_walltime,
delta_walltime,
_z,
_T,
_S,
)
)
try:
now = timer()
out_list = np.linspace(t_current, t_current + t_increment,
n_out + 1)
out_list = np.linspace(t_current, t_current + t_increment, n_out + 1)
tx, xx = self.sim.simulate(t_current + t_increment, 0, out_list)

@@ -320,0 +332,0 @@ except CVodeError as e:

@@ -1,29 +0,20 @@

from __future__ import print_function
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import range
import os.path
import pickle
from datetime import datetime as ddt
import pickle
import os.path
import numpy as np
import pandas as pd
try:
import xarray
_XARRAY = True
except ImportError:
_XARRAY = False
import xarray as xr
from .thermo import es, rho_air
from .postprocess import simulation_activation
from .version import __version__ as ver
from . import __version__ as ver
from . import constants as c
from .thermo import rho_air
from .util import ParcelModelError
#: Acceptable output formats
OUTPUT_FORMATS = [ 'nc', 'obj', 'csv', ]
OUTPUT_FORMATS = ["nc", "obj", "csv"]
def get_timestamp(fmt="%m%d%y_%H%M%S"):
""" Get current timestamp in MMDDYY_hhmmss format. """
"""Get current timestamp in MMDDYY_hhmmss format."""

@@ -35,6 +26,13 @@ current_time = ddt.now()

def write_parcel_output(filename=None, format=None, parcel=None,
parcel_df=None, aerosol_dfs=None, other_dfs=None):
""" Write model output to disk.
def write_parcel_output(
filename=None,
format=None,
parcel=None,
parcel_df=None,
aerosol_dfs=None,
other_dfs=None,
):
"""Write model output to disk.
Wrapper for methods to write parcel model output to disk.

@@ -44,6 +42,6 @@

----------
filename : str
filename : str
Full filename to write output; if not supplied, will default
to the current timestamp
format : str
format : str
Format to use from ``OUTPUT_FORMATS``; must be supplied if no

@@ -57,3 +55,3 @@ filename is provided

Aerosol size history
other_dfs : list of DataFrames
other_dfs : list of DataFrames
Additional DataFrames to include in output; must have the same index

@@ -67,3 +65,3 @@ as the parcel's results when transformed to a DataFrame!

raise ParcelModelError("Must supply either a filename or format.")
if not (format in OUTPUT_FORMATS):
if format not in OUTPUT_FORMATS:
raise ParcelModelError("Please supply a format from %r" % OUTPUT_FORMATS)

@@ -74,17 +72,18 @@ basename = get_timestamp()

basename, extension = os.path.splitext(filename)
extension = extension[1:] # strip '.'
if not (extension in OUTPUT_FORMATS):
extension = format = 'obj'
extension = extension[1:] # strip '.'
if extension not in OUTPUT_FORMATS:
extension = format = "obj"
else:
format = extension
if parcel.console:
print()
print("Saving output to %s format with base filename %s" % (extension, basename))
print(
"Saving output to %s format with base filename %s" % (extension, basename)
)
print()
#filename = "%s.%s" % (basename, extension)
# filename = "%s.%s" % (basename, extension)
# Sanity - check, either we need the dataframes themselves or
# Sanity - check, either we need the dataframes themselves or
# we need the model

@@ -94,6 +93,6 @@ if (parcel_df is None) and (aerosol_dfs is None):

raise ValueError("Need to supply either dataframes or model")
else:
else:
parcel_df, aerosol_dfs = parcel_to_dataframes(parcel)
# Concatenate on the additional dataframes supplied by the user
if not (other_dfs is None):
if other_dfs is not None:
for df in other_dfs:

@@ -103,6 +102,5 @@ parcel_df = pd.concat([parcel_df, df], axis=1)

# 1) csv
if format == 'csv':
if format == "csv":
# Write parcel data
parcel_df.to_csv("%s_%s.%s" % (basename, 'parcel', extension))
parcel_df.to_csv("%s_%s.%s" % (basename, "parcel", extension))

@@ -114,83 +112,131 @@ # Write aerosol data

# 2) nc
elif format == 'nc':
elif format == "nc":
## Construct xarray datastructure to write to netCDF
ds = xr.Dataset(attrs={"Conventions": "CF-1.0", "source": "pyrcel v%s" % ver})
if not _XARRAY:
raise ValueError("Module `xarray` must be installed to output "
"to netcdf!")
ds.coords["time"] = (
"time",
parcel.time,
{"units": "seconds", "long_name": "simulation time"},
)
## Construct xray datastructure to write to netCDF
ds = xarray.Dataset(attrs={
'Conventions': "CF-1.0",
'source': "pyrcel v%s" % ver,
})
ds.coords['time'] = ('time', parcel.time,
{ 'units': 'seconds', 'long_name': 'simulation time' })
## Aerosol coordinates and basic data
for aerosol in parcel.aerosols:
if parcel.console: print(aerosol)
if parcel.console:
print(aerosol)
nr = aerosol.nr
r_drys = aerosol.r_drys*1e6
kappas = [aerosol.kappa, ]*nr
Nis = aerosol.Nis*1e-6
r_drys = aerosol.r_drys * 1e6
kappas = [aerosol.kappa] * nr
Nis = aerosol.Nis * 1e-6
species = aerosol.species
aer_coord = '%s_bins' % species
aer_coord = "%s_bins" % species
ds.coords[aer_coord] = (aer_coord,
np.array(list(range(1, aerosol.nr+1)), dtype=np.int32),
{ 'long_name': '%s size bin number' % species } )
ds['%s_rdry' % species] = ((aer_coord, ), r_drys,
{ 'units': 'micron',
'long_name': '%s bin dry radii' % species })
ds['%s_kappas' % species] = ((aer_coord, ), kappas,
{ 'long_name': '%s bin kappa-kohler hygroscopicity' % species })
ds['%s_Nis' % species] = ((aer_coord, ), Nis,
{ 'units': 'cm-3',
'long_name': '%s bin number concentration' % species })
ds.coords[aer_coord] = (
aer_coord,
np.array(list(range(1, aerosol.nr + 1)), dtype=np.int32),
{"long_name": "%s size bin number" % species},
)
ds["%s_rdry" % species] = (
(aer_coord,),
r_drys,
{"units": "micron", "long_name": "%s bin dry radii" % species},
)
ds["%s_kappas" % species] = (
(aer_coord,),
kappas,
{"long_name": "%s bin kappa-kohler hygroscopicity" % species},
)
ds["%s_Nis" % species] = (
(aer_coord,),
Nis,
{
"units": "cm-3",
"long_name": "%s bin number concentration" % species,
},
)
size_data = aerosol_dfs[species].values*1e6
ds['%s_size' % species] = (('time', aer_coord), size_data,
{ 'units': 'micron',
'long_name': '%s bin wet radii' % species })
size_data = aerosol_dfs[species].values * 1e6
ds["%s_size" % species] = (
("time", aer_coord),
size_data,
{"units": "micron", "long_name": "%s bin wet radii" % species},
)
## Parcel data
ds['S'] = (('time', ), parcel_df['S']*100.,
{ 'units': "%" ,
'long_name': "Supersaturation" })
ds['T'] = (('time', ), parcel_df['T'],
{ 'units': "K", "long_name": "Temperature" })
ds['P'] = (('time', ), parcel_df['P'],
{ 'units': 'Pa', 'long_name': "Pressure" })
ds['wv'] = (('time', ), parcel_df['wv'],
{ 'units': 'kg/kg', 'long_name': "Water vapor mixing ratio" })
ds['wc'] = (('time', ), parcel_df['wc'],
{ 'units': 'kg/kg', 'long_name': "Liquid water mixing ratio" })
ds['wi'] = (('time', ), parcel_df['wi'],
{ 'units': 'kg/kg', 'long_name': "Ice water mixing ratio" })
ds['height'] = (('time', ), parcel_df['z'],
{ 'units': "meters", 'long_name': "Parcel height above start" })
ds['rho'] = (('time', ), parcel_df['rho'],
{ 'units': "kg/m3", 'long_name': "Air density"})
ds["S"] = (
("time",),
parcel_df["S"] * 100.0,
{"units": "%", "long_name": "Supersaturation"},
)
ds["T"] = (
("time",),
parcel_df["T"],
{"units": "K", "long_name": "Temperature"},
)
ds["P"] = (
("time",),
parcel_df["P"],
{"units": "Pa", "long_name": "Pressure"},
)
ds["wv"] = (
("time",),
parcel_df["wv"],
{"units": "kg/kg", "long_name": "Water vapor mixing ratio"},
)
ds["wc"] = (
("time",),
parcel_df["wc"],
{"units": "kg/kg", "long_name": "Liquid water mixing ratio"},
)
ds["wi"] = (
("time",),
parcel_df["wi"],
{"units": "kg/kg", "long_name": "Ice water mixing ratio"},
)
ds["height"] = (
("time",),
parcel_df["z"],
{"units": "meters", "long_name": "Parcel height above start"},
)
ds["rho"] = (
("time",),
parcel_df["rho"],
{"units": "kg/m3", "long_name": "Air density"},
)
ds['wtot'] = (('time', ), parcel_df['wv'] + parcel_df['wc'],
{ 'units': 'kg/kg', 'long_name': "Total water mixing ratio" })
ds["wtot"] = (
("time",),
parcel_df["wv"] + parcel_df["wc"],
{"units": "kg/kg", "long_name": "Total water mixing ratio"},
)
if 'alpha' in parcel_df:
ds['alpha'] = (('time', ), parcel_df['alpha'],
{ 'long_name': "ratio of Nkn/Neq" })
if 'phi' in parcel_df:
ds['phi'] = (('time', ), parcel_df['phi'],
{ 'long_name': "fraction of not-strictly activated drops in Nkn" })
if 'eq' in parcel_df:
ds['eq'] = (('time', ), parcel_df['eq'],
{ 'long_name': "Equilibrium Kohler-activated fraction" })
if 'kn' in parcel_df:
ds['kn'] = (('time', ), parcel_df['kn'],
{ 'long_name': "Kinetic activated fraction" })
if "alpha" in parcel_df:
ds["alpha"] = (
("time",),
parcel_df["alpha"],
{"long_name": "ratio of Nkn/Neq"},
)
if "phi" in parcel_df:
ds["phi"] = (
("time",),
parcel_df["phi"],
{"long_name": "fraction of not-strictly activated drops in Nkn"},
)
if "eq" in parcel_df:
ds["eq"] = (
("time",),
parcel_df["eq"],
{"long_name": "Equilibrium Kohler-activated fraction"},
)
if "kn" in parcel_df:
ds["kn"] = (
("time",),
parcel_df["kn"],
{"long_name": "Kinetic activated fraction"},
)
## Save to disk
ds.to_netcdf(basename+".nc")
ds.to_netcdf(basename + ".nc")

@@ -201,7 +247,8 @@ # 3) obj (pickle)

with open(basename+".obj", 'w') as f:
with open(basename + ".obj", "w") as f:
pickle.dump(parcel, f)
def parcel_to_dataframes(parcel):
""" Convert model simulation to dataframe.
"""Convert model simulation to dataframe.

@@ -212,8 +259,8 @@ Parameters

Returns
Returns
-------
DataFrame and list of DataFrames
The first returned DataFrame will be the parcel model thermo/dynamical
output with keys ``T``, ``wv``, ``wc``, ``S``, ``z``, and indexed by
``time``. The list of DataFrames shares the same ``time`` index, but
The first returned DataFrame will be the parcel model thermo/dynamical
output with keys ``T``, ``wv``, ``wc``, ``S``, ``z``, and indexed by
``time``. The list of DataFrames shares the same ``time`` index, but
is divided up so that the radii of each aerosol species are tracked in

@@ -239,11 +286,11 @@ different containers.

parcel_out = pd.DataFrame({ var: x[:, i] for i, var in enumerate(c.STATE_VARS) },
index=time)
parcel_out = pd.DataFrame(
{var: x[:, i] for i, var in enumerate(c.STATE_VARS)}, index=time
)
## Add some thermodynamic output to the parcel model dataframe
parcel_out['rho'] = rho_air(parcel_out['T'], parcel_out['P'],
parcel_out['S'] + 1.)
parcel_out["rho"] = rho_air(parcel_out["T"], parcel_out["P"], parcel_out["S"] + 1.0)
aerosol_dfs = {}
species_shift = 0 # increment by nr to select the next aerosol's radii
species_shift = 0 # increment by nr to select the next aerosol's radii
for aerosol in parcel.aerosols:

@@ -256,7 +303,7 @@ nr = aerosol.nr

for i, label in enumerate(labels):
radii_dict[label] = x[:,c.N_STATE_VARS+species_shift+i]
radii_dict[label] = x[:, c.N_STATE_VARS + species_shift + i]
aerosol_dfs[species] = pd.DataFrame( radii_dict, index=time )
aerosol_dfs[species] = pd.DataFrame(radii_dict, index=time)
species_shift += nr
return parcel_out, aerosol_dfs
return parcel_out, aerosol_dfs
""" Main implementation of parcel model.
"""
from __future__ import print_function
from __future__ import absolute_import
from builtins import zip
from builtins import range
from builtins import object
import os
import numpy as np
from scipy.optimize import bisect

@@ -16,19 +11,12 @@

from . import output
from . aerosol import AerosolSpecies
# from . integrator import Integrator
from . thermo import *
__all__ = ['ParcelModel', ]
# Special import of derivative ODE rhs to main namespace
from ._parcel_aux_numba import parcel_ode_sys
from .aerosol import AerosolSpecies
from .thermo import Seq, es, kohler_crit, rho_air
from .util import ParcelModelError
__all__ = ["ParcelModel"]
class ParcelModelError(Exception):
""" Custom exception to throw during parcel model execution.
"""
def __init__(self, error_str):
self.error_str = error_str
def __str__(self):
return repr(self.error_str)
class ParcelModel(object):

@@ -107,5 +95,14 @@ """ Wrapper class for instantiating and running the parcel model.

def __init__(self, aerosols, V, T0, S0, P0, console=False, accom=c.ac,
truncate_aerosols=False):
""" Initialize the parcel model.
def __init__(
self,
aerosols,
V,
T0,
S0,
P0,
console=False,
accom=c.ac,
truncate_aerosols=False,
):
"""Initialize the parcel model.

@@ -151,3 +148,3 @@ Parameters

def set_initial_conditions(self, V=None, T0=None, S0=None, P0=None, aerosols=None):
""" Set the initial conditions and parameters for a new parcel
"""Set the initial conditions and parameters for a new parcel
model run without having to create a new :class:`ParcelModel` instance.

@@ -209,3 +206,3 @@

def _replace_aerosol(self, aerosols, smallest_rdry=1e-10):
""" Truncates the bins with the smallest particles so that none
"""Truncates the bins with the smallest particles so that none
have a dry radius of less than 0.1 nm.

@@ -232,11 +229,21 @@

if bin_count > 0:
aer_new = AerosolSpecies(aer.species,
aer.distribution,
kappa=aer.kappa,
bins=aer.bins,
r_min=smallest_rdry*1e6)
aer_new = AerosolSpecies(
aer.species,
aer.distribution,
kappa=aer.kappa,
bins=aer.bins,
r_min=smallest_rdry * 1e6,
)
N_new = np.sum(aer_new.Nis)
if self.console:
print("%s: Removed %03d bins, N change: %5.1f -> %5.1f (%2.1f%%)" %
(aer.species, bin_count, N_old, N_new, (N_new-N_old)/N_new))
print(
"%s: Removed %03d bins, N change: %5.1f -> %5.1f (%2.1f%%)"
% (
aer.species,
bin_count,
N_old,
N_new,
(N_new - N_old) / N_new,
)
)
fixed_aerosols.append(aer_new)

@@ -249,3 +256,3 @@ else:

def _setup_run(self):
""" Perform equilibration and initialization calculations for the
"""Perform equilibration and initialization calculations for the
parcel model simulation.

@@ -269,5 +276,5 @@

r_drys.extend(aerosol.r_drys)
kappas.extend([aerosol.kappa]*aerosol.nr)
kappas.extend([aerosol.kappa] * aerosol.nr)
Nis.extend(aerosol.Nis)
species.extend([aerosol.species]*aerosol.nr)
species.extend([aerosol.species] * aerosol.nr)

@@ -278,5 +285,5 @@ r_drys = np.array(r_drys)

out['r_drys'] = r_drys
out['kappas'] = kappas
out['Nis'] = Nis
out["r_drys"] = r_drys
out["kappas"] = kappas
out["Nis"] = Nis

@@ -289,3 +296,3 @@ if self.console:

print("%10s %2.2e %4.1f" % (sp, r, N))
print("\n"+"-"*44)
print("\n" + "-" * 44)

@@ -295,3 +302,5 @@ # 2) Setup parcel initial conditions

# RH * (Rd/Rv = epsilon) * ( es / P - es )
wv0 = (S0 + 1.)*(c.epsilon*es(T0-273.15)/(P0-es(T0-273.15))) # Water Vapor mixing ratio, kg/kg
wv0 = (S0 + 1.0) * (
c.epsilon * es(T0 - 273.15) / (P0 - es(T0 - 273.15))
) # Water Vapor mixing ratio, kg/kg

@@ -307,4 +316,4 @@ # b) find equilibrium wet particle radius

for r_dry, kappa in zip(reversed(r_drys), reversed(kappas)):
# Locate the critical value (f(r_crit) > 0), and use bisection from it and
# r_dry (f(r_dry) < 0) to find equilibrium wet particle radius for a given S0
# Locate the critical value (f(r_crit) > 0), and use bisection from it and
# r_dry (f(r_dry) < 0) to find equilibrium wet particle radius for a given S0
r_b, _ = kohler_crit(T0, r_dry, kappa)

@@ -321,3 +330,3 @@ r_a = r_dry

raised = False
for (r, r_dry, sp, kappa) in zip(r0s, r_drys, species, kappas):
for r, r_dry, sp, kappa in zip(r0s, r_drys, species, kappas):
ss = Seq(r, r_dry, T0, kappa)

@@ -329,3 +338,3 @@ # rc, _ = kohler_crit(T0, r_dry, kappa)

raised = True
if np.abs(ss-S0) > 1e-4:
if np.abs(ss - S0) > 1e-4:
if self.console:

@@ -335,9 +344,18 @@ print("Found S discrepancy", ss, S0, r_dry)

if raised:
raise ParcelModelError("Couldn't calculate initial aerosol population wet sizes.")
out['r0s'] = r0s
raise ParcelModelError(
"Couldn't calculate initial aerosol population wet sizes."
)
out["r0s"] = r0s
# c) compute equilibrium droplet water content
water_vol = lambda r0, r_dry, Ni: (4.*np.pi/3.)*rho_w*Ni*(r0**3 - r_dry**3)
wc0 = np.sum([water_vol(r0, r_dry, Ni) for r0, r_dry, Ni in zip(r0s, r_drys, Nis)])
wc0 /= rho_air(T0, P0, 0.)
water_vol = (
lambda r0, r_dry, Ni: (4.0 * np.pi / 3.0)
* c.rho_w
* Ni
* (r0**3 - r_dry**3)
)
wc0 = np.sum(
[water_vol(r0, r_dry, Ni) for r0, r_dry, Ni in zip(r0s, r_drys, Nis)]
)
wc0 /= rho_air(T0, P0, 0.0)

@@ -351,9 +369,21 @@ # d) compute initial ice water content

print("PARCEL INITIAL CONDITIONS")
print((" " + "{:>9} "*6).format("P (hPa)", "T (K)", "wv (g/kg)",
"wc (g/kg)", "wi (g/kg)", "S"))
print(" " + "{:9.1f} {:9.2f} {:9.1e} {:9.1e} {:9.1e} {:9.3f}".format(
P0/100., T0, wv0*1e3, wc0*1e3, wi0*1e3, S0))
print(
(" " + "{:>9} " * 6).format(
"P (hPa)",
"T (K)",
"wv (g/kg)",
"wc (g/kg)",
"wi (g/kg)",
"S",
)
)
print(
" "
+ "{:9.1f} {:9.2f} {:9.1e} {:9.1e} {:9.1e} {:9.3f}".format(
P0 / 100.0, T0, wv0 * 1e3, wc0 * 1e3, wi0 * 1e3, S0
)
)
y0.extend(r0s)
y0 = np.array(y0)
out['y0'] = y0
out["y0"] = y0
self.y0 = y0

@@ -373,7 +403,15 @@

def run(self, t_end,
output_dt=1., solver_dt=None,
max_steps=1000, solver="odeint", output_fmt="dataframes",
terminate=False, terminate_depth=100., **solver_args):
""" Run the parcel model simulation.
def run(
self,
t_end,
output_dt=1.0,
solver_dt=None,
max_steps=1000,
solver="odeint",
output_fmt="dataframes",
terminate=False,
terminate_depth=100.0,
**solver_args
):
"""Run the parcel model simulation.

@@ -459,9 +497,11 @@ Once the model has been instantiated, a simulation can immediately be

"""
from . integrator import Integrator
from .integrator import Integrator
if output_fmt not in ["dataframes", "arrays", "smax"]:
raise ParcelModelError("Invalid value ('%s') specified for output format." % output)
raise ParcelModelError(
"Invalid value ('%s') specified for output format." % output
)
if solver_dt is None:
solver_dt = 10.*output_dt
solver_dt = 10.0 * output_dt

@@ -479,24 +519,30 @@ if not self._model_set:

try:
from .parcel_aux import der as der_fcn
# Cython
# from .parcel_aux import der as rhs_fcn
# Numba - JIT
from ._parcel_aux_numba import parcel_ode_sys as rhs_fcn
# Numba - AOT
# from .parcel_aux_numba import parcel_ode_sys as rhs_fcn
except ImportError:
print("Could not load Cython derivative; using Python version.")
from .parcel import der as der_fcn
from .parcel import parcel_ode_sys as rhs_fcn
# Hack in Python derivative function
# der_fcn = der
# rhs_fcn = der
# Is the updraft speed a function of time?
v_is_func = hasattr(self.V, '__call__')
v_is_func = hasattr(self.V, "__call__")
if v_is_func: # Re-wrap the function to correctly figure out V
orig_der_fcn = der_fcn
orig_rhs_fcn = rhs_fcn
def der_fcn(y, t, *args):
def rhs_fcn(y, t, *args):
V_t = self.V(t)
args[3] = V_t
return orig_der_fcn(y, t, *args)
return orig_rhs_fcn(y, t, *args)
# Will the simulation terminate early?
if not terminate:
terminate_depth = 0.
terminate_depth = 0.0
else:
if terminate_depth <= 0.:
if terminate_depth <= 0.0:
raise ParcelModelError("`terminate_depth` must be greater than 0!")

@@ -510,3 +556,3 @@

print(" max solver dt: ", solver_dt)
print(" solver int steps: ", int(solver_dt/output_dt))
print(" solver int steps: ", int(solver_dt / output_dt))
print(" termination: %r (%5dm)" % (terminate, terminate_depth))

@@ -516,6 +562,13 @@

integrator_type = Integrator.solver(solver)
integrator = integrator_type(der_fcn, output_dt, solver_dt, y0, args,
terminate=terminate, terminate_depth=terminate_depth,
console=self.console,
**solver_args)
integrator = integrator_type(
rhs_fcn,
output_dt,
solver_dt,
y0,
args,
terminate=terminate,
terminate_depth=terminate_depth,
console=self.console,
**solver_args
)
success = False

@@ -540,3 +593,3 @@ try:

self.x = x
self.heights = self.x[:, c.STATE_VAR_MAP['z']]
self.heights = self.x[:, c.STATE_VAR_MAP["z"]]
self.time = t

@@ -549,10 +602,11 @@

elif output_fmt == "smax":
S = self.x[:, c.STATE_VAR_MAP['S']]
S = self.x[:, c.STATE_VAR_MAP["S"]]
return S.max()
def write_summary(self, parcel_data, aerosol_data, out_filename):
""" Write a quick and dirty summary of given parcel model output to the
"""Write a quick and dirty summary of given parcel model output to the
terminal.
"""
from .activation import lognormal_activation
# Check if parent dir of out_filename exists, and if not,

@@ -565,3 +619,3 @@ # create it

# Open a file to write to
with open(out_filename, 'w') as out_file:
with open(out_filename, "w") as out_file:
# Write header

@@ -580,3 +634,3 @@ out_file.write("PARCEL MODEL\n")

for aerosol in self.aerosols:
out_file.write(aerosol.species+" = "+aerosol.summary_str()+"\n")
out_file.write(aerosol.species + " = " + aerosol.summary_str() + "\n")
out_file.write("--------------------------------------\n")

@@ -586,3 +640,3 @@

# 1) Maximum supersaturation in parcel
S_max = parcel_data['S'].max()
S_max = parcel_data["S"].max()
S_max_idx = np.argmax(parcel_data.S)

@@ -592,21 +646,32 @@ out_file.write("S_max = %f\n" % S_max)

# 2) Activated fraction of each species
T_at_S_max = parcel_data['T'].ix[S_max_idx]
T_at_S_max = parcel_data["T"].iloc[S_max_idx]
total_number = 0.0
total_activated = 0.0
for aerosol in self.aerosols:
act_frac = lognormal_activation(S_max, aerosol.mu*1e-6, aerosol.sigma,
aerosol.N, aerosol.kappa, T=T_at_S_max)
act_num = act_frac*aerosol.N
out_file.write("%s - eq_act_frac = %f (%3.2f/%3.2f)\n" %
(aerosol.species, act_frac, act_num, aerosol.N))
act_frac = lognormal_activation(
S_max,
aerosol.mu * 1e-6,
aerosol.sigma,
aerosol.N,
aerosol.kappa,
T=T_at_S_max,
)
act_num = act_frac * aerosol.N
out_file.write(
"%s - eq_act_frac = %f (%3.2f/%3.2f)\n"
% (aerosol.species, act_frac, act_num, aerosol.N)
)
total_number += aerosol.N
total_activated += act_num
total_act_frac = total_activated/total_number
out_file.write("Total activated fraction = %f (%3.2f/%3.2f)\n" %
(total_act_frac, total_activated, total_number))
total_act_frac = total_activated / total_number
out_file.write(
"Total activated fraction = %f (%3.2f/%3.2f)\n"
% (total_act_frac, total_activated, total_number)
)
def save(self, filename=None, format="nc", other_dfs=None):
output.write_parcel_output(filename=filename, format=format, parcel=self,
other_dfs=other_dfs)
output.write_parcel_output(
filename=filename, format=format, parcel=self, other_dfs=other_dfs
)

@@ -641,134 +706,140 @@ @staticmethod

def der(y, t, nr, r_drys, Nis, V, kappas, accom=c.ac):
    """Calculate the instantaneous time-derivative of the parcel model system.

    Given a current state vector `y` of the parcel model, computes the tendency
    of each term, including thermodynamic (pressure, temperature, etc.) and
    aerosol terms. The basic aerosol properties used in the model must be
    passed along with the state vector (i.e. if being used as the callback
    function in an ODE solver).

    This function is implemented in NumPy and Python, and is likely *very*
    slow compared to the available compiled versions; it serves as a readable
    reference implementation.

    Parameters
    ----------
    y : array_like
        Current state of the parcel model system,
        * y[0] = altitude, m
        * y[1] = Pressure, Pa
        * y[2] = temperature, K
        * y[3] = water vapor mass mixing ratio, kg/kg
        * y[4] = cloud liquid water mass mixing ratio, kg/kg
        * y[5] = cloud ice water mass mixing ratio, kg/kg
        * y[6] = parcel supersaturation
        * y[7:] = aerosol bin sizes (radii), m
    t : float
        Current simulation time, in seconds (unused; required by the ODE
        solver callback signature).
    nr : int
        Number of aerosol radii being tracked.
    r_drys : array_like
        Array recording original aerosol dry radii, m.
    Nis : array_like
        Array recording aerosol number concentrations, 1/(m**3).
    V : float
        Updraft velocity, m/s.
    kappas : array_like
        Array recording aerosol hygroscopicities.
    accom : float, optional (default=:const:`constants.ac`)
        Condensation (mass accommodation) coefficient.

    Returns
    -------
    x : array_like
        Array of shape (``nr`` + 7, ) containing the evaluated parcel model
        instantaneous derivative.

    Notes
    -----
    This Python sketch of the derivative function shouldn't really be used
    for any computational purposes. Instead, see the compiled version in the
    auxiliary module. In the default configuration, once the code has been
    built, you can set the environmental variable **OMP_NUM_THREADS** to
    control the parallel for loop which calculates the condensational growth
    rate for each bin.
    """
    # Unpack only the state variables actually used by the tendency terms.
    P = y[1]
    T = y[2]
    wv = y[3]
    S = y[6]
    rs = np.asarray(y[c.N_STATE_VARS:])

    T_c = T - 273.15  # convert temperature to Celsius
    # BUGFIX: the original computed ``es(T - T_c)``; since T_c = T - 273.15,
    # that argument is always the constant 273.15, so the saturation vapor
    # pressure was evaluated at +273.15 degC regardless of parcel state.
    # The Bolton fit ``es`` expects the temperature in Celsius.
    pv_sat = es(T_c)             # saturation vapor pressure, Pa
    Tv = (1. + 0.61*wv)*T        # virtual temperature given parcel humidity
    e = (1. + S)*pv_sat          # ambient water vapor pressure, Pa
    rho_air = P/(Rd*Tv)          # air density accounting for humidity
    rho_air_dry = (P - e)/Rd/T   # dry air density

    # 1) Pressure tendency (hydrostatic balance while rising at speed V)
    dP_dt = -1.*rho_air*g*V

    # 2/3) Wet particle growth rates and droplet liquid water tendency
    drs_dt = np.zeros(shape=(nr, ))
    dwc_dt = 0.
    for i in range(nr):
        r = rs[i]
        r_dry = r_drys[i]
        kappa = kappas[i]

        # Diffusivity / thermal conductivity, corrected for non-continuum
        # (kinetic) effects at small radii.
        dv_r = dv(T, r, P, accom)
        ka_r = ka(T, rho_air, r)

        # Condensational growth coefficient: vapor diffusion term (G_a)
        # plus thermal/latent-heat term (G_b).
        G_a = (rho_w*R*T)/(pv_sat*dv_r*Mw)
        G_b = (L*rho_w*((L*Mw/(R*T)) - 1.))/(ka_r*T)
        G = 1./(G_a + G_b)

        # Growth is driven by the gap between ambient and equilibrium
        # supersaturation over this droplet.
        delta_S = S - Seq(r, r_dry, T, kappa)
        dr_dt = (G/r)*delta_S

        Ni = Nis[i]
        dwc_dt += Ni*(r*r)*dr_dt  # accumulate number-weighted r^2 dr/dt
        drs_dt[i] = dr_dt
    # Convert the accumulated volume-growth integral to a mass mixing
    # ratio tendency (kg water / kg dry air / s).
    dwc_dt *= 4.*np.pi*rho_w/rho_air_dry

    # 4) Ice water content (ice processes are not modeled here)
    dwi_dt = 0.0

    # 5) Water vapor content: condensation removes vapor
    dwv_dt = -1.*(dwc_dt + dwi_dt)

    # 6) Temperature: adiabatic cooling plus latent heat release
    dT_dt = -g*V/Cp - L*dwv_dt/Cp

    # 7) Supersaturation: production by ascent (alpha*V) minus depletion
    # by condensation (gamma*dwc_dt)
    alpha = (g*Mw*L)/(Cp*R*(T**2)) - (g*Ma)/(R*T)
    gamma = (P*Ma)/(Mw*es(T - 273.15)) + (Mw*L*L)/(Cp*R*T*T)
    dS_dt = alpha*V - gamma*dwc_dt

    dz_dt = V

    # Repackage tendencies for feedback to the numerical solver
    x = np.zeros(shape=(nr + c.N_STATE_VARS, ))
    x[0] = dz_dt
    x[1] = dP_dt
    x[2] = dT_dt
    x[3] = dwv_dt
    x[4] = dwc_dt
    x[5] = dwi_dt
    x[6] = dS_dt
    x[c.N_STATE_VARS:] = drs_dt[:]

    return x
#
# def parcel_ode_sys(y, t, nr, r_drys, Nis, V, kappas, accom=c.ac):
# """ Calculates the instantaneous time-derivate of the parcel model system.
#
# Given a current state vector `y` of the parcel model, computes the tendency
# of each term including thermodynamic (pressure, temperature, etc) and aerosol
# terms. The basic aerosol properties used in the model must be passed along
# with the state vector (i.e. if being used as the callback function in an ODE
# solver).
#
# This function is implemented in NumPy and Python, and is likely *very* slow
# compared to the available Cython version.
#
# Parameters
# ----------
# y : array_like
# Current state of the parcel model system,
# * y[0] = altitude, m
# * y[1] = Pressure, Pa
# * y[2] = temperature, K
# * y[3] = water vapor mass mixing ratio, kg/kg
# * y[4] = cloud liquid water mass mixing ratio, kg/kg
# * y[5] = cloud ice water mass mixing ratio, kg/kg
# * y[6] = parcel supersaturation
# * y[7:] = aerosol bin sizes (radii), m
# t : float
# Current simulation time, in seconds.
# nr : Integer
# Number of aerosol radii being tracked.
# r_drys : array_like
# Array recording original aerosol dry radii, m.
# Nis : array_like
# Array recording aerosol number concentrations, 1/(m**3).
# V : float
# Updraft velocity, m/s.
# kappas : array_like
# Array recording aerosol hygroscopicities.
# accom : float, optional (default=:const:`constants.ac`)
# Condensation coefficient.
#
# Returns
# -------
# x : array_like
# Array of shape (``nr``+7, ) containing the evaluated parcel model
# instaneous derivative.
#
# Notes
# -----
# This Python sketch of the derivative function shouldn't really be used for
# any computational purposes. Instead, see the cythonized version in the auxiliary
# file, **parcel_aux.pyx**. In the default configuration, once the code has been
# built, you can set the environmental variable **OMP_NUM_THREADS** to control
# the parallel for loop which calculates the condensational growth rate for each
# bin.
#
# """
# from . import thermo
#
# z = y[0]
# P = y[1]
# T = y[2]
# wv = y[3]
# wc = y[4]
# wi = y[5]
# S = y[6]
# rs = np.asarray(y[c.N_STATE_VARS :])
#
# T_c = T - 273.15 # convert temperature to Celsius
# pv_sat = thermo.es(T - T_c) # saturation vapor pressure
# wv_sat = wv / (S + 1.0) # saturation mixing ratio
# Tv = (1.0 + 0.61 * wv) * T # virtual temperature given parcel humidity
# e = (1.0 + S) * pv_sat # water vapor pressure
# rho_air = P / (c.Rd * Tv) # current air density accounting for humidity
# rho_air_dry = (P - e) / c.Rd / T # dry air density
#
# # 1) Pressure
# dP_dt = -1.0 * rho_air * c.g * V
#
# # 2/3) Wet particle growth rates and droplet liquid water
# drs_dt = np.zeros(shape=(nr,))
# dwc_dt = 0.0
# for i in range(nr):
# r = rs[i]
# r_dry = r_drys[i]
# kappa = kappas[i]
#
# dv_r = thermo.dv(T, r, P, accom)
# ka_r = thermo.ka(T, rho_air, r)
#
# G_a = (c.rho_w * c.R * T) / (pv_sat * dv_r * c.Mw)
# G_b = (c.L * c.rho_w * ((c.L * c.Mw / (c.R * T)) - 1.0)) / (ka_r * T)
# G = 1.0 / (G_a + G_b)
#
# delta_S = S - thermo.Seq(r, r_dry, T, kappa)
# dr_dt = (G / r) * delta_S
# Ni = Nis[i]
# dwc_dt += Ni * (r * r) * dr_dt
# drs_dt[i] = dr_dt
#
# # if i == nr-1:
# # print 1, r, r_dry, Ni
# # print 2, dv(T, r, P, accom), ka(T, rho_air, r)
# # print 3, G_a, G_b
# # print 4, Seq(r, r_dry, T, kappa, 1.0)
# # print 5, dr_dt
#
# dwc_dt *= 4.0 * np.pi * c.rho_w / rho_air_dry
#
# # 4) ice water content
# dwi_dt = 0.0
#
# # 5) Water vapor content
# dwv_dt = -1.0 * (dwc_dt + dwi_dt)
#
# # 6) Temperature
# dT_dt = -c.g * V / c.Cp - c.L * dwv_dt / c.Cp
#
# # 7) Supersaturation
# alpha = (c.g * c.Mw * c.L) / (c.Cp * c.R * (T ** 2))
# alpha -= (c.g * c.Ma) / (c.R * T)
#
# gamma = (P * c.Ma) / (c.Mw * thermo.es(T - 273.15))
# gamma += (c.Mw * c.L * c.L) / (c.Cp * c.R * T * T)
# dS_dt = alpha * V - gamma * dwc_dt
#
# dz_dt = V
#
# # Repackage tendencies for feedback to numerical solver
# x = np.zeros(shape=(nr + c.N_STATE_VARS,))
# x[0] = dz_dt
# x[1] = dP_dt
# x[2] = dT_dt
# x[3] = dwv_dt
# x[4] = dwc_dt
# x[5] = dwi_dt
# x[6] = dS_dt
# x[c.N_STATE_VARS :] = drs_dt[:]
#
# return x
""" Collection of output post-processing routines.
"""
from __future__ import absolute_import
from builtins import range
from . activation import binned_activation
import numpy as np
import pandas as pd
from .activation import binned_activation
def simulation_activation(model, parcel_df, aerosols_panel):
""" Given the DataFrame output from a parcel model simulation, compute
"""Given the DataFrame output from a parcel model simulation, compute
activation kinetic limitation diagnostics.

@@ -33,9 +30,9 @@

initial_row = parcel_df.iloc[0]
Smax_i, T_i = initial_row['S'], initial_row['T']
Smax_i, T_i = initial_row["S"], initial_row["T"]
acts = {'eq': [], 'kn': [], 'alpha': [], 'phi': []}
acts = {"eq": [], "kn": [], "alpha": [], "phi": []}
initial_aerosols = model.aerosols
N_all_modes = np.sum([aer.total_N for aer in initial_aerosols])
N_fracs = {aer.species: aer.total_N/N_all_modes for aer in initial_aerosols}
N_fracs = {aer.species: aer.total_N / N_all_modes for aer in initial_aerosols}
for i in range(len(parcel_df)):

@@ -46,7 +43,7 @@ row_par = parcel_df.iloc[i]

# Update thermo
T_i = row_par['T']
if row_par['S'] > Smax_i:
Smax_i = row_par['S']
T_i = row_par["T"]
if row_par["S"] > Smax_i:
Smax_i = row_par["S"]
eq_tot, kn_tot, alpha_tot, phi_tot = 0., 0., 0., 0.
eq_tot, kn_tot, alpha_tot, phi_tot = 0.0, 0.0, 0.0, 0.0
for aerosol in initial_aerosols:

@@ -57,13 +54,13 @@ N_frac = N_fracs[aerosol.species]

eq, kn, alpha, phi = binned_activation(Smax_i, T_i, rs, aerosol)
eq_tot += eq*N_frac
kn_tot += kn*N_frac
alpha_tot += alpha*N_frac
phi_tot += phi*N_frac
eq_tot += eq * N_frac
kn_tot += kn * N_frac
alpha_tot += alpha * N_frac
phi_tot += phi * N_frac
acts['kn'].append(kn_tot)
acts['eq'].append(eq_tot)
acts['alpha'].append(alpha_tot)
acts['phi'].append(phi_tot)
acts["kn"].append(kn_tot)
acts["eq"].append(eq_tot)
acts["alpha"].append(alpha_tot)
acts["phi"].append(phi_tot)
acts_total = pd.DataFrame(acts, index=parcel_df.index)
return acts_total

@@ -9,14 +9,12 @@ # -*- coding: utf-8 -*-

"""
from builtins import map
import numpy as np
from scipy.optimize import fminbound
from . constants import *
from .constants import *
# THERMODYNAMIC FUNCTIONS
def dv_cont(T, P):
""" Diffusivity of water vapor in air, neglecting non-continuum effects.
"""Diffusivity of water vapor in air, neglecting non-continuum effects.

@@ -42,8 +40,8 @@ See :func:`dv` for details.

"""
P_atm = P*1.01325e-5 # Pa -> atm
return 1e-4*(0.211/P_atm)*((T/273.)**1.94)
P_atm = P * 1.01325e-5 # Pa -> atm
return 1e-4 * (0.211 / P_atm) * ((T / 273.0) ** 1.94)
def dv(T, r, P, accom=ac):
""" Diffusivity of water vapor in air, modified for non-continuum effects.
"""Diffusivity of water vapor in air, modified for non-continuum effects.

@@ -100,8 +98,8 @@ The diffusivity of water vapor in air as a function of temperature and pressure

dv_t = dv_cont(T, P)
denom = 1.0 + (dv_t/(accom*r))*np.sqrt((2.*np.pi*Mw)/(R*T))
return dv_t/denom
denom = 1.0 + (dv_t / (accom * r)) * np.sqrt((2.0 * np.pi * Mw) / (R * T))
return dv_t / denom
def rho_air(T, P, RH=1.0):
""" Density of moist air with a given relative humidity, temperature, and pressure.
"""Density of moist air with a given relative humidity, temperature, and pressure.

@@ -138,5 +136,5 @@ Uses the traditional formula from the ideal gas law (3.41)[Petty2006].

"""
qsat = RH*0.622*(es(T-273.15)/P)
Tv = T*(1.0 + 0.61*qsat)
rho_a = P/Rd/Tv # air density
qsat = RH * 0.622 * (es(T - 273.15) / P)
Tv = T * (1.0 + 0.61 * qsat)
rho_a = P / Rd / Tv # air density

@@ -147,3 +145,3 @@ return rho_a

def es(T_c):
""" Calculates the saturation vapor pressure over water for a given temperature.
"""Calculates the saturation vapor pressure over water for a given temperature.

@@ -180,7 +178,7 @@ Uses an empirical fit [Bolton1980], which is accurate to :math:`0.1\%` over the

"""
return 611.2*np.exp(17.67*T_c/(T_c+243.5))
return 611.2 * np.exp(17.67 * T_c / (T_c + 243.5))
def ka_cont(T):
""" Thermal conductivity of air, neglecting non-continuum effects.
"""Thermal conductivity of air, neglecting non-continuum effects.

@@ -204,7 +202,7 @@ See :func:`ka` for details.

"""
return 1e-3*(4.39 + 0.071*T)
return 1e-3 * (4.39 + 0.071 * T)
def ka(T, rho, r):
""" Thermal conductivity of air, modified for non-continuum effects.
"""Thermal conductivity of air, modified for non-continuum effects.

@@ -256,8 +254,8 @@ The thermal conductivity of air is given by

ka_t = ka_cont(T)
denom = 1.0 + (ka_t/(at*r*rho*Cp))*np.sqrt((2.*np.pi*Ma)/(R*T))
return ka_t/denom
denom = 1.0 + (ka_t / (at * r * rho * Cp)) * np.sqrt((2.0 * np.pi * Ma) / (R * T))
return ka_t / denom
def sigma_w(T):
""" Surface tension of water for a given temperature.
"""Surface tension of water for a given temperature.

@@ -280,3 +278,3 @@ .. math::

"""
return 0.0761 - 1.55e-4*(T-273.15)
return 0.0761 - 1.55e-4 * (T - 273.15)

@@ -339,5 +337,5 @@

"""
A = (2.*Mw*sigma_w(T))/(R*T*rho_w*r)
B = (r**3 - (r_dry**3))/(r**3 - (r_dry**3)*(1.-kappa))
s = np.exp(A)*B - 1.0
A = (2.0 * Mw * sigma_w(T)) / (R * T * rho_w * r)
B = (r**3 - (r_dry**3)) / (r**3 - (r_dry**3) * (1.0 - kappa))
s = np.exp(A) * B - 1.0
return s

@@ -347,3 +345,3 @@

def Seq_approx(r, r_dry, T, kappa):
""" Approximate κ-Kohler theory equilibrium saturation over aerosol.
"""Approximate κ-Kohler theory equilibrium saturation over aerosol.

@@ -386,8 +384,10 @@ Calculates the equilibrium supersaturation (relative to 100% RH) over an

"""
A = (2.*Mw*sigma_w(T))/(R*T*rho_w*r)
return A - kappa*(r_dry**3)/(r**3) # the minus 1.0 is built into this expression
A = (2.0 * Mw * sigma_w(T)) / (R * T * rho_w * r)
return A - kappa * (r_dry**3) / (
r**3
) # the minus 1.0 is built into this expression
def kohler_crit(T, r_dry, kappa, approx=False):
""" Critical radius and supersaturation of an aerosol particle.
"""Critical radius and supersaturation of an aerosol particle.

@@ -428,10 +428,11 @@ The critical size of an aerosol particle corresponds to the maximum equilibrium

if approx:
A = (2.*Mw*sigma_w(T))/(R*T*rho_w)
s_crit = np.sqrt((4.*(A**3))/(27*kappa*(r_dry**3)))
r_crit = np.sqrt((3.*kappa*(r_dry**3))/A)
A = (2.0 * Mw * sigma_w(T)) / (R * T * rho_w)
s_crit = np.sqrt((4.0 * (A**3)) / (27 * kappa * (r_dry**3)))
r_crit = np.sqrt((3.0 * kappa * (r_dry**3)) / A)
else:
neg_Seq = lambda r : -1.*Seq(r, r_dry, T, kappa)
out = fminbound(neg_Seq, r_dry, r_dry*1e4,
xtol=1e-10, full_output=True, disp=0)
neg_Seq = lambda r: -1.0 * Seq(r, r_dry, T, kappa)
out = fminbound(
neg_Seq, r_dry, r_dry * 1e4, xtol=1e-10, full_output=True, disp=0
)
r_crit, s_crit = out[:2]

@@ -444,3 +445,3 @@ s_crit *= -1.0 # multiply by -1 to undo negative flag for Seq

def critical_curve(T, r_a, r_b, kappa, approx=False):
""" Calculates curves of critical radii and supersaturations for aerosol.
"""Calculates curves of critical radii and supersaturations for aerosol.

@@ -471,2 +472,3 @@ Calls :func:`kohler_crit` for values of ``r_dry`` between ``r_a`` and ``r_b``

"""
def crit_func(rd):

@@ -483,2 +485,3 @@ kohler_crit(T, rd, kappa, approx)

# MICROPHYSICS

@@ -518,2 +521,2 @@

"""
return (3.*rho*wc/(4.*np.pi*rho_w*Ni))**(1./3.)
return (3.0 * rho * wc / (4.0 * np.pi * rho_w * Ni)) ** (1.0 / 3.0)

@@ -0,2 +1,8 @@

# file generated by setuptools_scm
# don't change, don't track in version control
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Tuple
__version__ = '1.3.1'
__version__ = version = '1.3.2' # type: str
__version_tuple__ = version_tuple = (1, 3, 2) # type: Tuple[int | str, ...]

@@ -1,11 +0,6 @@

from builtins import zip
import numpy as np
import pyrcel as pm
import matplotlib.pyplot as plt
import seaborn as sns
def plot_distribution(aer, aer_kwargs={},
ax=None, **kwargs):
""" Generate a comparison plot of a given aerosol or
def plot_distribution(aer, aer_kwargs={}, ax=None, **kwargs):
"""Generate a comparison plot of a given aerosol or
droplet distribution

@@ -29,4 +24,6 @@

## Add some basic aer_kwargs if not provided
if not 'color' in aer_kwargs: aer_kwargs['color'] = 'g'
if not 'alpha' in aer_kwargs: aer_kwargs['alpha'] = 0.5
if not "color" in aer_kwargs:
aer_kwargs["color"] = "g"
if not "alpha" in aer_kwargs:
aer_kwargs["alpha"] = 0.5

@@ -36,10 +33,10 @@ rl, rr = aer.rs[0], aer.rs[-1]

r_width = aer.rs[1:] - r_left
r_mean = np.sqrt(aer.rs[1:]*r_left)
bin_height = aer.Nis/1e6
r_mean = np.sqrt(aer.rs[1:] * r_left)
bin_height = aer.Nis / 1e6
bars = ax.bar(r_left, bin_height, width=r_width, **aer_kwargs)
legend_objects = [(bars[0], "%s bins" % aer.species), ]
legend_objects = [(bars[0], "%s bins" % aer.species)]
handles, labels = list(zip(*legend_objects))
ax.legend(handles, labels, loc='upper right')
ax.legend(handles, labels, loc="upper right")

@@ -50,2 +47,2 @@ ax.semilogx()

ax.set_xlabel("$r$ ($\mu$m)")
ax.set_ylabel("Number Concentration (cm$^{-3}$)")
ax.set_ylabel("Number Concentration (cm$^{-3}$)")
#!/usr/bin/env bash
# Quick script to build the documentation.
#
# BUGFIX: without strict-mode options a failed `make` in any step was
# silently ignored and the script kept going; abort on the first error.
set -euo pipefail

# Generate .rst files from the notebooks containing pre-rendered examples
cd examples
make

# Go back and generate the stub automethod templates in the API reference
cd ..
make gen

# Now make the html documents
make html
# -*- coding: utf-8 -*-
#
# pyrcel documentation build configuration file, created by
# sphinx-quickstart on Wed Jul 2 15:49:19 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.doctest',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.mathjax',
    'numpydoc',
    'IPython.sphinxext.ipython_directive',
    'IPython.sphinxext.ipython_console_highlighting',
]

# Fix toctree bug http://stackoverflow.com/questions/12206334/sphinx-autosummary-toctree-contains-reference-to-nonexisting-document-warnings
# (the duplicate assignment of this flag has been removed)
numpydoc_show_class_members = False
autosummary_generate = True
numpydoc_class_members_toctree = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pyrcel'
copyright = u'2016, Daniel Rothenberg'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
from pyrcel import __version__ as version
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'templates',]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from
# docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
# 'bootstrap_version': "3",
# 'bootswatch_theme': "cosmo", #"yeti"
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
#html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), ]
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyrceldoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pyrcel.tex', u'pyrcel documentation',
u'Daniel Rothenberg', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyrcel', u'pyrcel Documentation',
[u'Daniel Rothenberg'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyrcel', u'pyrcel Documentation',
u'Daniel Rothenberg', 'pyrcel',
'Adiabatic cloud parcel model for aerosol activation studies',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Conda environment specification (consumed via `conda env create -f ...`).
# NOTE(review): pins legacy Python 2.7 — confirm this file is still the
# intended py27 environment and not stale.
name: pyrcel
# Build, runtime, and documentation dependencies.
dependencies:
- python=2.7
- cython
- future
- pandas
- pyyaml
- scipy
- numpy
- xarray
# Documentation tooling.
- numpydoc
- ipython
- sphinx

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

.. _example_activate:
.. currentmodule:: parcel_model
Example: Activation
===================
In this example, we will study the effect of updraft speed on the
activation of a lognormal ammonium sulfate accumulation mode aerosol.
.. code:: python
# Suppress warnings
import warnings
warnings.simplefilter('ignore')
import pyrcel as pm
import numpy as np
%matplotlib inline
import matplotlib.pyplot as plt
import seaborn as sns
First, we indicate the parcel's initial thermodynamic conditions.
.. code:: python
P0 = 100000. # Pressure, Pa
T0 = 279. # Temperature, K
S0 = -0.1 # Supersaturation, 1-RH
We next define the aerosol distribution to follow the reference
simulation from `Ghan et al,
2011 <http://onlinelibrary.wiley.com/doi/10.1029/2011MS000074/abstract>`__
.. code:: python
aer = pm.AerosolSpecies('ammonium sulfate',
pm.Lognorm(mu=0.05, sigma=2.0, N=1000.),
kappa=0.7, bins=100)
Loop over several updraft velocities in the range 0.1 - 10.0 m/s. We
will perform a detailed parcel model calculation, as well as calculations
with two activation parameterizations. We will also use an accommodation
coefficient of :math:`\alpha_c = 0.1`, following the recommendations of
`Raatikainen et al (2013) <http://www.pnas.org/content/110/10/3760>`__.
First, the parcel model calculations:
.. code:: python
from pyrcel import binned_activation
Vs = np.logspace(-1, np.log10(10.), 11)[::-1] # 0.1 - 10.0 m/s
accom = 0.1
smaxes, act_fracs = [], []
for V in Vs:
# Initialize the model
model = pm.ParcelModel([aer,], V, T0, S0, P0, accom=accom, console=False)
par_out, aer_out = model.run(t_end=2500., dt=1.0, solver='cvode',
output='dataframes', terminate=True)
print(V, par_out.S.max())
# Extract the supersaturation/activation details from the model
# output
S_max = par_out['S'].max()
time_at_Smax = par_out['S'].argmax()
wet_sizes_at_Smax = aer_out['ammonium sulfate'].ix[time_at_Smax].iloc[0]
wet_sizes_at_Smax = np.array(wet_sizes_at_Smax.tolist())
frac_eq, _, _, _ = binned_activation(S_max, T0, wet_sizes_at_Smax, aer)
# Save the output
smaxes.append(S_max)
act_fracs.append(frac_eq)
.. parsed-literal::
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
10.0 0.0156189147154
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
6.3095734448 0.0116683910368
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
3.98107170553 0.00878287310116
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
2.51188643151 0.00664901290831
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
1.58489319246 0.00505644091867
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
1.0 0.00385393398982
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.63095734448 0.00293957320198
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.398107170553 0.00224028774582
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.251188643151 0.00170480101361
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.158489319246 0.0012955732509
[CVode Warning] b'At the end of the first step, there are still some root functions identically 0. This warning will not be issued again.'
0.1 0.000984803827635
Now the activation parameterizations:
.. code:: python
smaxes_arg, act_fracs_arg = [], []
smaxes_mbn, act_fracs_mbn = [], []
for V in Vs:
smax_arg, _, afs_arg = pm.arg2000(V, T0, P0, [aer], accom=accom)
smax_mbn, _, afs_mbn = pm.mbn2014(V, T0, P0, [aer], accom=accom)
smaxes_arg.append(smax_arg)
act_fracs_arg.append(afs_arg[0])
smaxes_mbn.append(smax_mbn)
act_fracs_mbn.append(afs_mbn[0])
Finally, we compile our results into a nice plot for visualization.
.. code:: python
sns.set(context="notebook", style='ticks')
sns.set_palette("husl", 3)
fig, [ax_s, ax_a] = plt.subplots(1, 2, sharex=True, figsize=(10,4))
ax_s.plot(Vs, np.array(smaxes)*100., color='k', lw=2, label="Parcel Model")
ax_s.plot(Vs, np.array(smaxes_mbn)*100., linestyle='None',
marker="o", ms=10, label="MBN2014" )
ax_s.plot(Vs, np.array(smaxes_arg)*100., linestyle='None',
marker="o", ms=10, label="ARG2000" )
ax_s.semilogx()
ax_s.set_ylabel("Supersaturation Max, %")
ax_s.set_ylim(0, 2.)
ax_a.plot(Vs, act_fracs, color='k', lw=2, label="Parcel Model")
ax_a.plot(Vs, act_fracs_mbn, linestyle='None',
marker="o", ms=10, label="MBN2014" )
ax_a.plot(Vs, act_fracs_arg, linestyle='None',
marker="o", ms=10, label="ARG2000" )
ax_a.semilogx()
ax_a.set_ylabel("Activated Fraction")
ax_a.set_ylim(0, 1.)
plt.tight_layout()
sns.despine()
for ax in [ax_s, ax_a]:
ax.legend(loc='upper left')
ax.xaxis.set_ticks([0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0])
ax.xaxis.set_ticklabels([0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0])
ax.set_xlabel("Updraft speed, m/s")
.. image:: activate_files/activate_13_0.png

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

.. _example_basic:
.. currentmodule:: parcel_model
Example: Basic Run
==================
In this example, we will setup a simple parcel model simulation
containing two aerosol modes. We will then run the model with a 1 m/s
updraft, and observe how the aerosol population bifurcates into swelled
aerosol and cloud droplets.
.. code:: python
# Suppress warnings
import warnings
warnings.simplefilter('ignore')
import pyrcel as pm
import numpy as np
%matplotlib inline
import matplotlib.pyplot as plt
.. parsed-literal::
Could not find GLIMDA
First, we indicate the parcel's initial thermodynamic conditions.
.. code:: python
P0 = 77500. # Pressure, Pa
T0 = 274. # Temperature, K
S0 = -0.02 # Supersaturation, 1-RH (98% here)
Next, we define the aerosols present in the parcel. The model itself is
agnostic to how the aerosol are specified; it simply expects lists of
the radii of wetted aerosol radii, their number concentration, and their
hygroscopicity. We can make container objects
(:class:`AerosolSpecies`) that wrap all of this information so that we
never need to worry about it.
Here, let's construct two aerosol modes:
+----------+-----------------------+-----------------+---------+--------------------+
| Mode | :math:`\kappa` | Mean size | Std dev | Number Conc |
| | (hygroscopicity) | (micron) | | (cm\*\*-3) |
+==========+=======================+=================+=========+====================+
| sulfate | 0.54 | 0.015 | 1.6 | 850 |
+----------+-----------------------+-----------------+---------+--------------------+
| sea salt | 1.2 | 0.85 | 1.2 | 10 |
+----------+-----------------------+-----------------+---------+--------------------+
We'll define each mode using the :class:`Lognorm` distribution
packaged with the model.
.. code:: python
sulfate = pm.AerosolSpecies('sulfate',
pm.Lognorm(mu=0.015, sigma=1.6, N=850.),
kappa=0.54, bins=200)
sea_salt = pm.AerosolSpecies('sea salt',
pm.Lognorm(mu=0.85, sigma=1.2, N=10.),
kappa=1.2, bins=40)
The :class:`AerosolSpecies` class automatically computes
gridded/binned representations of the size distributions. Let's double
check that the aerosol distribution in the model will make sense by
plotting the number concentration in each bin.
.. code:: python
fig = plt.figure(figsize=(10,5))
ax = fig.add_subplot(111)
ax.grid(False, "minor")
sul_c = "#CC0066"
ax.bar(sulfate.rs[:-1], sulfate.Nis*1e-6, np.diff(sulfate.rs),
color=sul_c, label="sulfate", edgecolor="#CC0066")
sea_c = "#0099FF"
ax.bar(sea_salt.rs[:-1], sea_salt.Nis*1e-6, np.diff(sea_salt.rs),
color=sea_c, label="sea salt", edgecolor="#0099FF")
ax.semilogx()
ax.set_xlabel("Aerosol dry radius, micron")
ax.set_ylabel("Aerosol number conc., cm$^{-3}$")
ax.legend(loc='upper right')
.. parsed-literal::
<matplotlib.legend.Legend at 0x10f4baeb8>
.. image:: basic_run_files/basic_run_9_1.png
Actually running the model is very straightforward, and involves just
two steps:
1. Instantiate the model by creating a :class:`ParcelModel` object.
2. Call the model's :meth:`run` method.
For convenience this process is encoded into several routines in the
``driver`` file, including both a single-strategy routine and an
iterating routine which adjusts the timestep and numerical
tolerances if the model crashes. However, we can illustrate the simple
model running process here in case you wish to develop your own scheme
for running the model.
.. code:: python
initial_aerosols = [sulfate, sea_salt]
V = 1.0 # updraft speed, m/s
dt = 1.0 # timestep, seconds
t_end = 250./V # end time, seconds... 250 meter simulation
model = pm.ParcelModel(initial_aerosols, V, T0, S0, P0, console=False, accom=0.3)
parcel_trace, aerosol_traces = model.run(t_end, dt, solver='cvode')
If ``console`` is set to ``True``, then some basic debugging output will
be written to the terminal, including the initial equilibrium droplet
size distribution and some numerical solver diagnostics. The model
output can be customized; by default, we get a DataFrame and a Panel of
the parcel state vector and aerosol bin sizes as a function of time (and
height). We can use this to visualize the simulation results, like in
the package's
`README <https://github.com/darothen/parcel_model/blob/master/README.md>`__.
.. code:: python
fig, [axS, axA] = plt.subplots(1, 2, figsize=(10, 4), sharey=True)
axS.plot(parcel_trace['S']*100., parcel_trace['z'], color='k', lw=2)
axT = axS.twiny()
axT.plot(parcel_trace['T'], parcel_trace['z'], color='r', lw=1.5)
Smax = parcel_trace['S'].max()*100
z_at_smax = parcel_trace['z'].ix[parcel_trace['S'].argmax()]
axS.annotate("max S = %0.2f%%" % Smax,
xy=(Smax, z_at_smax),
xytext=(Smax-0.3, z_at_smax+50.),
arrowprops=dict(arrowstyle="->", color='k',
connectionstyle='angle3,angleA=0,angleB=90'),
zorder=10)
axS.set_xlim(0, 0.7)
axS.set_ylim(0, 250)
axT.set_xticks([270, 271, 272, 273, 274])
axT.xaxis.label.set_color('red')
axT.tick_params(axis='x', colors='red')
axS.set_xlabel("Supersaturation, %")
axT.set_xlabel("Temperature, K")
axS.set_ylabel("Height, m")
sulf_array = aerosol_traces['sulfate'].values
sea_array = aerosol_traces['sea salt'].values
ss = axA.plot(sulf_array[:, ::10]*1e6, parcel_trace['z'], color=sul_c,
label="sulfate")
sa = axA.plot(sea_array*1e6, parcel_trace['z'], color=sea_c, label="sea salt")
axA.semilogx()
axA.set_xlim(1e-2, 10.)
axA.set_xticks([1e-2, 1e-1, 1e0, 1e1], [0.01, 0.1, 1.0, 10.0])
axA.legend([ss[0], sa[0]], ['sulfate', 'sea salt'], loc='upper right')
axA.set_xlabel("Droplet radius, micron")
for ax in [axS, axA, axT]:
ax.grid(False, 'both', 'both')
.. image:: basic_run_files/basic_run_13_0.png
In this simple example, the sulfate aerosol population bifurcated into
interstitial aerosol and cloud droplets, while the entire sea salt
population activated. A peak supersaturation of about 0.63% was reached
a few meters above cloud base, where the ambient relative humidity hit
100%.
How many CDNC does this translate into? We can call upon helper methods
from the ``activation`` package to perform these calculations for us:
.. code:: python
from pyrcel import binned_activation
sulf_trace = aerosol_traces['sulfate']
sea_trace = aerosol_traces['sea salt']
ind_final = int(t_end/dt) - 1
T = parcel_trace['T'].iloc[ind_final]
eq_sulf, kn_sulf, alpha_sulf, phi_sulf = \
binned_activation(Smax/100, T, sulf_trace.iloc[ind_final], sulfate)
eq_sulf *= sulfate.total_N
eq_sea, kn_sea, alpha_sea, phi_sea = \
binned_activation(Smax/100, T, sea_trace.iloc[ind_final], sea_salt)
eq_sea *= sea_salt.total_N
print(" CDNC(sulfate) = {:3.1f}".format(eq_sulf))
print(" CDNC(sea salt) = {:3.1f}".format(eq_sea))
print("------------------------")
print(" total = {:3.1f} / {:3.0f} ~ act frac = {:1.2f}".format(
eq_sulf+eq_sea,
sea_salt.total_N+sulfate.total_N,
(eq_sulf+eq_sea)/(sea_salt.total_N+sulfate.total_N)
))
.. parsed-literal::
CDNC(sulfate) = 146.9
CDNC(sea salt) = 10.0
------------------------
total = 156.9 / 860 ~ act frac = 0.18
# Convert every Jupyter notebook in this directory to reStructuredText so
# Sphinx can include the pre-rendered examples (outputs are committed rather
# than re-executed, because the Assimulo dependency is hard to build on CI).
NOTEBOOKS := $(wildcard *.ipynb)
RSTS := $(NOTEBOOKS:.ipynb=.rst)
.PHONY: all
all: $(RSTS)
# Pattern rule: one .rst (plus a *_files/ figure dir) per notebook.
%.rst: %.ipynb
	jupyter-nbconvert $^ --to rst
# Remove generated .rst files, extracted figure directories, and build output.
clean:
	rm *.rst
	rm -rf *_files/
	rm -rf _build/
Because the Assimulo dependency is difficult to build on third-party content providers (travis, RTD, etc), I've opted to manually control the examples in this directory. Here are the steps to keep the examples up to date:
1. Run each notebook independently, so that the results (figures, etc) it includes are pre-rendered and self-contained
2. Execute the supplied Makefile to convert ipynb -> rst and generate the rendered figures
3. Commit the newly generated *_files/ folders and all new resources
In the future, this can be simplified by packaging the entire parcel model install. However, that will require either (a) re-opening the ability to use third-party ODE solvers from SciPy, or (b) packaging Assimulo and Sundials into conda for easy access on a third-party server.

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

pyrcel\.activation\.arg2000
===========================
.. currentmodule:: pyrcel.activation
.. autofunction:: arg2000
pyrcel\.activation\.binned\_activation
======================================
.. currentmodule:: pyrcel.activation
.. autofunction:: binned_activation
pyrcel\.activation\.lognormal\_activation
=========================================
.. currentmodule:: pyrcel.activation
.. autofunction:: lognormal_activation
pyrcel\.activation\.mbn2014
===========================
.. currentmodule:: pyrcel.activation
.. autofunction:: mbn2014
pyrcel\.activation\.ming2006
============================
.. currentmodule:: pyrcel.activation
.. autofunction:: ming2006
pyrcel\.activation\.multi\_mode\_activation
===========================================
.. currentmodule:: pyrcel.activation
.. autofunction:: multi_mode_activation
pyrcel\.activation\.shipwayabel2010
===================================
.. currentmodule:: pyrcel.activation
.. autofunction:: shipwayabel2010
:mod:`pyrcel.aerosol`.AerosolSpecies
===========================================
.. currentmodule:: pyrcel.aerosol
.. autoclass:: AerosolSpecies
:members:
:undoc-members:
.. automethod:: __init__
pyrcel\.aerosol\.dist\_to\_conc
===============================
.. currentmodule:: pyrcel.aerosol
.. autofunction:: dist_to_conc
:mod:`pyrcel.distributions`.BaseDistribution
===================================================
.. currentmodule:: pyrcel.distributions
.. autoclass:: BaseDistribution
:members:
:undoc-members:
.. automethod:: __init__
:mod:`pyrcel.distributions`.Gamma
========================================
.. currentmodule:: pyrcel.distributions
.. autoclass:: Gamma
:members:
:undoc-members:
.. automethod:: __init__
:mod:`pyrcel.distributions`.Lognorm
==========================================
.. currentmodule:: pyrcel.distributions
.. autoclass:: Lognorm
:members:
:undoc-members:
.. automethod:: __init__
:mod:`pyrcel.distributions`.MultiModeLognorm
===================================================
.. currentmodule:: pyrcel.distributions
.. autoclass:: MultiModeLognorm
:members:
:undoc-members:
.. automethod:: __init__
pyrcel\.driver\.iterate\_runs
=============================
.. currentmodule:: pyrcel.driver
.. autofunction:: iterate_runs
pyrcel\.driver\.run\_model
==========================
.. currentmodule:: pyrcel.driver
.. autofunction:: run_model
pyrcel\.ParcelModel
===================
.. currentmodule:: pyrcel
.. autoclass:: ParcelModel
.. automethod:: __init__
.. rubric:: Methods
.. autosummary::
~ParcelModel.__init__
~ParcelModel.run
~ParcelModel.save
~ParcelModel.set_initial_conditions
~ParcelModel.write_csv
~ParcelModel.write_summary
pyrcel\.thermo\.critical\_curve
===============================
.. currentmodule:: pyrcel.thermo
.. autofunction:: critical_curve
pyrcel\.thermo\.dv
==================
.. currentmodule:: pyrcel.thermo
.. autofunction:: dv
pyrcel\.thermo\.es
==================
.. currentmodule:: pyrcel.thermo
.. autofunction:: es
pyrcel\.thermo\.ka
==================
.. currentmodule:: pyrcel.thermo
.. autofunction:: ka
pyrcel\.thermo\.kohler\_crit
============================
.. currentmodule:: pyrcel.thermo
.. autofunction:: kohler_crit
pyrcel\.thermo\.rho\_air
========================
.. currentmodule:: pyrcel.thermo
.. autofunction:: rho_air
pyrcel\.thermo\.Seq\_approx
===========================
.. currentmodule:: pyrcel.thermo
.. autofunction:: Seq_approx
pyrcel\.thermo\.Seq
===================
.. currentmodule:: pyrcel.thermo
.. autofunction:: Seq
pyrcel\.thermo\.sigma\_w
========================
.. currentmodule:: pyrcel.thermo
.. autofunction:: sigma_w
pyrcel: cloud parcel model
==========================
|DOI|\ |Build Status|\ |Documentation Status|
.. |DOI| image:: https://zenodo.org/badge/12927551.svg
:target: https://zenodo.org/badge/latestdoi/12927551
.. |Build Status| image:: https://travis-ci.org/darothen/pyrcel.svg?branch=master
:target: https://travis-ci.org/darothen/pyrcel
.. |Documentation Status| image:: https://readthedocs.org/projects/pyrcel/badge/?version=stable
:target: http://pyrcel.readthedocs.org/en/stable/?badge=stable
This is an implementation of a simple, 0D adiabatic cloud parcel model tool (following `Nenes et al, 2001`_ and `Pruppacher and Klett, 1997`_). It allows flexible descriptions of an initial aerosol population, and simulates the evolution of a proto-cloud droplet population as the parcel ascends adiabatically at either a constant or time/height-dependent updraft speed. Droplet growth within the parcel is tracked on a Lagrangian grid.
.. _Pruppacher and Klett, 1997: http://books.google.com/books?hl=en&lr=&id=1mXN_qZ5sNUC&oi=fnd&pg=PR15&ots=KhdkC6uhB3&sig=PSlNsCeLSB2FvR93Vzo0ptCAnYA#v=onepage&q&f=false
.. _Nenes et al, 2001: http://onlinelibrary.wiley.com/doi/10.1034/j.1600-0889.2001.d01-12.x/abstract
.. image:: figs/model_example.png
You are invited to use the model (in accordance with the `licensing <https://raw
.githubusercontent.com/darothen/pyrcel/master/LICENSE>`_), but please get in
touch with the author via `e-mail <mailto:darothen@mit.edu>`_ or on
`twitter <https://twitter.com/darothen>`_. Up-to-date versions can be obtained
through the model's `github repository <https://github.com/darothen/pyrcel>`_
or directly from the author. If you use the model for research, please cite
`this journal article <http://journals.ametsoc.org/doi/abs/10.1175/JAS-D-15-0223.1>`_
which details the original model formulation:
| Daniel Rothenberg and Chien Wang, 2016: Metamodeling of Droplet Activation for Global Climate Models. *J. Atmos. Sci.*, **73**, 1255–1272. doi: http://dx.doi.org/10.1175/JAS-D-15-0223.1
|
|
Documentation Outline
---------------------
.. toctree::
:maxdepth: 2
:glob:
sci_descr
install
examples/*
parcel
reference
Current version: |version|
Documentation last compiled: |today|
.. _install:
Installation
============
With conda
----------
Coming soon!
From pip
---------
We now make available recent releases on ``pip``.
From source code
----------------
To grab and build the latest bleeding-edge version of the model, you should use
``pip`` and point it to the source code `repository`_ on github:
.. code-block:: bash
$ pip install git+git://github.com/darothen/pyrcel.git
This should automatically build the necessary Cython modules and export the
code package to your normal package installation directory. If you wish to
simply build the code and run it in place, clone the `repository`_, navigate
to it in a terminal, and invoke the build command by hand:
.. code-block:: bash
$ python setup.py build_ext --inplace
This should produce the compiled file `parcel_aux.so` in the model package.
You can also install the code from the cloned source directory by invoking
``pip install`` from within it; this is useful if you're updating or
modifying the model, since you can install an "editable" package which
points directly to the git-monitored code:
.. code-block:: bash
$ cd path/to/pyrcel/
$ pip install -e .
Dependencies
------------
This code was originally written for Python 2.7, and then
`futurized <http://python-future.org/>`_ to Python 3.3+ with hooks for
backwards compatibility. By far, the simplest way to run this code is to grab a
scientific python distribution, such as
`Anaconda <https://store.continuum.io/cshop/anaconda/>`_. This code should work
out-of-the box with almost all dependencies filled (exception being numerical
solvers) on a recent version (1.2+) of this distribution. To facilitate this,
`conda <http://conda.pydata.org/docs/>`_ environments for Python versions 2.7
and 3.4+ are provided in the ``pyrcel/ci`` directory.
Necessary dependencies
^^^^^^^^^^^^^^^^^^^^^^
- `Assimulo <http://www.jmodelica.org/assimulo_home/index.html>`_
- `Cython <http://cython.org/>`_
- `future <http://python-future.org/>`_
- `numpy <http://www.numpy.org/>`_
- `scipy <http://www.scipy.org/>`_
- `pandas <http://pandas.pydata.org/>`_
.. note::
As of version 1.2.0, the model integration components are being re-written
and only the CVODE interface is exposed. As such, Assimulo is temporarily
a core and required dependency; in the future the other solvers will
be re-enabled. You should first try to install Assimulo via conda
.. code-block:: bash
$ conda install -c conda-forge assimulo
since this will automatically take care of obtaining necessary compiled
dependencies like sundials. However, for best results you may want to
`manually install Assimulo <http://www.jmodelica.org/assimulo_home/installation.html>`_,
since the conda-forge recipe may default to a sundials/OpenBLAS combination
which could degrade the performance of the model.
Numerical solver dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- **LSODA** - `scipy <http://www.scipy.org/>`_ or
`odespy <https://github.com/hplgit/odespy/>`_
- **VODE**, **LSODE** - `odespy <https://github.com/hplgit/odespy/>`_
- **CVODE** - `Assimulo <http://www.jmodelica.org/assimulo_home/index.html>`_
Recommended additional packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- `matplotlib <http://matplotlib.sourceforge.net/>`_
- `seaborn <http://stanford.edu/~mwaskom/software/seaborn/index.html>`_
- `PyYAML <http://pyyaml.org/wiki/PyYAMLDocumentation>`_
- `xarray <http://xarray.pydata.org/en/stable/>`_
Testing
-------
A nose test-suite is under construction. To check that your model is configured
and running correctly, you copy and run the notebook corresponding to the
:ref:`basic run example <example_basic>`, or run the command-line interface
version of the model with the pre-packed simple run case:
.. code-block:: bash
$ cd path/to/pyrcel/
$ ./run_parcel examples/simple.yml
Bugs / Suggestions
------------------
The code has an
`issue tracker on github <https://github.com/darothen/pyrcel/issues>`_
and I strongly encourage you to note any problems with the model there, such
as typos or weird behavior and results. Furthermore, I'm looking for ways to
expand and extend the model, so if there is something you might wish to see
added, please note it there or `send me an e-mail <mailto:darothen@mit.edu>`_.
The code was written in such a way that it should be trivial to add physics in a modular fashion.
.. _repository: http://github.com/darothen/pyrcel
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html notebooks gen dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
# Default target: list every available builder target.
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
# Remove Sphinx build output plus autosummary stubs and rendered examples.
clean:
	rm -rf $(BUILDDIR)/* generated/
	rm -rf examples/*_files/
	rm -rf examples/*.rst
# Project-specific: re-render the example notebooks (see examples/Makefile).
notebooks:
	make -C examples notebooks
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
# Project-specific: regenerate autosummary stub pages into generated/.
gen:
	sphinx-autogen -t templates -o generated *.rst
	@echo
	@echo "auto-generated content finished building in $(BUILDDIR)/generated"
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyrcel.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyrcel.qhc"
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/pyrcel"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyrcel"
	@echo "# devhelp"
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	"(use \`make latexpdf' here to do that automatically)."
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	"(use \`make info' here to do that automatically)."
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	"results in $(BUILDDIR)/doctest/output.txt."
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
.. _parcel:
.. currentmodule:: pyrcel
Parcel Model Details
====================
Below is the documentation for the parcel model, which is useful for debugging
and development. For a higher-level overview, see the :ref:`scientific description
<sci_descr>`.
Implementation
--------------
.. autoclass:: ParcelModel
:members: set_initial_conditions, run
Derivative Equation
-------------------
.. automethod:: parcel.der
.. _reference:
.. currentmodule:: pyrcel
Reference
=========
Main Parcel Model
-----------------
The core of the model has its own documentation page, which you can access :ref:`here <parcel>`.
.. autosummary::
:toctree: generated/
ParcelModel
Driver Tools
------------
.. automodule:: pyrcel.driver
.. autosummary::
:toctree: generated/
run_model
iterate_runs
Thermodynamics/Kohler Theory
----------------------------
.. automodule:: pyrcel.thermo
.. autosummary::
:toctree: generated/
dv
ka
rho_air
es
sigma_w
Seq
Seq_approx
kohler_crit
critical_curve
Aerosols
--------
.. automodule:: pyrcel.aerosol
.. autosummary::
:toctree: generated/
:template: class.rst
AerosolSpecies
The following are utility functions which might be useful in studying
and manipulating aerosol distributions for use in the :ref:`model <parcel>`
or activation routines.
.. autosummary::
:toctree: generated/
dist_to_conc
Distributions
-------------
.. automodule:: pyrcel.distributions
.. autosummary::
:toctree: generated/
:template: class.rst
BaseDistribution
Gamma
Lognorm
MultiModeLognorm
The following dictionaries containing (multi) Lognormal aerosol size distributions have also been saved for convenience:
1. ``FN2005_single_modes``: Fountoukis, C., and A. Nenes (2005), Continued development of a cloud droplet formation parameterization for global climate models, J. Geophys. Res., 110, D11212, doi:10.1029/2004JD005591
2. ``NS2003_single_modes``: Nenes, A., and J. H. Seinfeld (2003), Parameterization of cloud droplet formation in global climate models, J. Geophys. Res., 108, 4415, doi:10.1029/2002JD002911, D14.
3. ``whitby_distributions``: Whitby, K. T. (1978), The physical characteristics of sulfur aerosols, Atmos. Environ., 12(1-3), 135–159, doi:10.1016/0004-6981(78)90196-8.
4. ``jaenicke_distributions``: Jaenicke, R. (1993), Tropospheric Aerosols, in *Aerosol-Cloud-Climate Interactions*, P. V. Hobbs, ed., Academic Press, San Diego, CA, pp. 1-31.
Activation
----------
.. automodule:: pyrcel.activation
.. autosummary::
:toctree: generated/
lognormal_activation
binned_activation
multi_mode_activation
arg2000
mbn2014
shipwayabel2010
ming2006
.. _constants:
Constants
---------
.. automodule:: pyrcel.constants
.. _sci_descr:
Scientific Description
======================
The simplest tools available for describing the growth and evolution of
a cloud droplet spectrum from a given population of aerosols are based
on zero-dimensional, adiabatic cloud parcel models. By employing a
detailed description of the condensation of ambient water vapor onto the
growing droplets, these models can accurately describe the activation of
a subset of the aerosol population by predicting how the presence of the
aerosols in the updraft modify the maximum supersaturation achieved as
the parcel rises. Furthermore, these models serve as the theoretical
basis or reference for parameterizations of droplet activation which are
included in modern general circulation models ([Ghan2011]_) .
The complexity of these models varies with the range of physical processes
one wishes to study. At the most complex end of the spectrum, one might wish
to accurately resolve chemical transfer between the gas and aqueous phase in
addition to physical transformations such as collision/coalescence. One could
also add ice-phase processes to such a model.
Model Formulation
-----------------
The adiabatic cloud parcel model implemented here is based on
models described in the literature ([Nenes2001]_, [SP2006]_) with some modifications and improvements. For a full description of the parcel model, please see [Rothenberg2016]_.
The conservation of heat in a parcel of air rising at constant
velocity :math:`V` without entrainment can be written as
.. math:: \frac{dT}{dt} = -\frac{gV}{c_p} - \frac{L}{c_p}\frac{d w_v}{dt}
:label: dTdt
where :math:`T` is the parcel air temperature. Assuming adiabaticity
and neglecting entrainment is suitable for studying cloud droplet
formation near the cloud base, where the majority of droplet activation
occurs. Because the mass of water must be conserved as it changes from
the vapor to liquid phase, the relationship
.. math:: \frac{d w_v}{dt} = - \frac{dw_c}{dt}
:label: dw_vdt
must hold, where :math:`w_v` and :math:`w_c` are the mass mixing ratios
of water vapor and liquid water (condensed in droplets) in the parcel.
The rate of change of water in the liquid phase in the parcel is
governed solely by condensation onto the existing droplet population.
For a population of :math:`N_i` droplets of radius :math:`r_i`, where
:math:`i=1,\dots,n`, the total condensation rate is given by
.. math:: \frac{dw_c}{dt} = \frac{4\pi \rho_w}{\rho_a}\sum\limits_{i=1}^nN_ir_i^2\frac{dr_i}{dt}
:label: dw_cdt
Here, the particle growth rate, :math:`\frac{dr_i}{dt}` is calculated as
.. math:: \frac{dr_i}{dt} = \frac{G}{r_i}(S-S_{eq})
:label: dr_idt
where :math:`G` is a growth coefficient which is a function of the
physical and chemical properties of the particle receiving condensate,
given by
.. math:: G = \left(\frac{\rho_w R T}{e_s D'_v M_w} + \frac{L\rho_w[(LM_w/RT) - 1]}{k'_a T}\right)^{-1}
:label: G
Droplet growth via condensation is modulated by the difference between
the environmental supersaturation, :math:`S`, and the droplet
equilibrium supersaturation, :math:`S_{eq}`, predicted from Kohler
theory. To account for differences in aerosol chemical properties which
could affect the ability for particles to uptake water, the
:math:`\kappa`-Köhler theory parameterization ([PK2007]_) is employed in the
model. :math:`\kappa`-Kohler theory utilizes a single parameter to
describe aerosol hygroscopicity, and is widely employed in modeling of
aerosol processes. The hygroscopicity parameter :math:`\kappa` is
related to the water activity of an aqueous aerosol solution by
.. math:: \frac{1}{a_w} = 1 + \kappa\frac{V_s}{V_w}
where :math:`V_s` and :math:`V_w` are the volumes of dry particulate
matter and water in the aerosol solution. With this parameter, the full
:math:`\kappa`-Kohler theory may be expressed as
.. math:: S_{eq} = \frac{r_i^3 - r_{d,i}^3}{r_i^3 - r_{d,i}^3(1-\kappa_i)}\exp\left( \frac{2M_w\sigma_w}{RT\rho_w r_i} \right) - 1
:label: S_eq
where :math:`r_d` and :math:`r` are the dry aerosol particle size and
the total radius of the wetted aerosol. The surface tension of water,
:math:`\sigma_w`, is dependent on the temperature of the parcel such
that :math:`\sigma_w = 0.0761 - 1.55\times 10^{-4}(T-273.15)`
J/m\ :math:`^2` . Both the diffusivity and thermal conductivity of air
have been modified in the growth coefficient equation to account for
non-continuum effects as droplets grow, and are given by the expressions
.. math:: D'_v = D_v\bigg/\left(1 + \frac{D_v}{a_c r}\sqrt{\frac{2\pi M_w}{RT}}\right)
and
.. math:: k'_a = k_a\bigg/\left(1 + \frac{k_a}{a_T r \rho_a c_p}\sqrt{\frac{2\pi M_a}{RT}} \right)
In these expressions, the thermal accommodation coefficient,
:math:`a_T`, is assumed to be :math:`0.96` and the condensation
coefficient, :math:`a_c` is taken as unity (see :ref:`Constants <constants>`).
Under the adiabatic assumption, the evolution of the parcel’s
supersaturation is governed by the balance between condensational
heating as water vapor condenses onto droplets and cooling induced by
the parcel’s vertical motion,
.. math:: \frac{dS}{dt} = \alpha V - \gamma\frac{dw_c}{dt}
:label: dSdt
where :math:`\alpha` and :math:`\gamma` are functions which are weakly
dependent on temperature and pressure :
.. math:: \alpha = \frac{gM_wL}{c_pRT^2} - \frac{gM_a}{RT}
.. math:: \gamma = \frac{PM_a}{e_sM_w} + \frac{M_wL^2}{c_pRT^2}
The parcel’s pressure is predicted using the hydrostatic relationship,
accounting for moisture by using virtual temperature (which can always
be diagnosed as the model tracks the specific humidity through the mass
mixing ratio of water vapor),
.. math:: \frac{dP}{dt} = \frac{-g P V}{R_d T_v}
:label: dPdt
The equations :eq:`dPdt`, :eq:`dSdt`, :eq:`dw_cdt`, :eq:`dw_vdt`,
and :eq:`dTdt` provide a simple, closed system of ordinary
differential equations which can be numerically integrated forward in
time. Furthermore, this model formulation allows the simulation of an
arbitrary configuration of initial aerosols, in terms of size, number
concentration, and hygroscopicity. Adding additional aerosol size bins
is simply accomplished by tracking one additional size bin in the system
of ODE’s. The important application of this feature is that the model
can be configured to simulate both internal or external mixtures of
aerosols, or some combination thereof.
Model Implementation and Procedure
----------------------------------
The parcel model described in the previous section was implemented using
a modern modular and object-oriented software engineering framework.
This framework allows the model to be simply configured with myriad
initial conditions and aerosol populations. It also enables model
components - such as the numerical solver or condensation
parameterization - to be swapped and replaced. Most importantly, the use
of object-oriented techniques allows the model to be incorporated into
frameworks which grossly accelerate the speed at which the model can be
evaluated. For instance, although models like the one developed here are
relatively cheap to execute, large ensembles of model runs have been
limited in scope to several hundred or a thousand runs. However, the
framework of this particular parcel model implementation was designed
such that it could be run as a black box as part of a massively-parallel
ensemble driver.
To run the model, a set of initial conditions needs to be specified,
which includes the updraft speed, the parcel’s initial temperature,
pressure, and supersaturation, and the aerosol population. Given these
parameters, the model calculates an initial equilibrium droplet spectrum
by computing the equilibrium wet radii of each aerosol. This is calculated
numerically from the Kohler equation for each aerosol/proto-droplet, or
by employing the typical Kohler theory approximation
.. math:: S \approx \frac{A}{r} - \kappa\frac{r_d^3}{r^3}
These wet radii are used as the initial droplet radii in the simulation.
Once the initial conditions have been configured, the model is
integrated forward in time with a numerical solver (see :func:`ParcelModel.run`
for more details). The available solvers wrapped here are:
- LSODA(R)
- LSODE
- (C)VODE
During the model integration, the size representing each aerosol bin is
allowed to grow via condensation, producing something akin to a moving
grid. In the future, a fixed Eulerian
grid will likely be implemented in the model for comparison.
Aerosol Population Specification
--------------------------------
The model may be supplied with any arbitrary population of aerosols,
providing the population can be approximated with a sectional
representation. Most commonly, aerosol size distributions are
represented with a continuous lognormal distribution,
.. math:: n_N(r) = \frac{dN}{d \ln r} = \frac{N_t}{\sqrt{2\pi}\ln \sigma_g}\exp\left(-\frac{ \ln^2(r/\mu_g)}{2\ln^2\sigma_g}\right)
:label: lognormal
which can be summarized with the set of three parameters,
:math:`(N_t, \mu_g, \sigma_g)` and correspond, respectively, to the
total aerosol number concentration, the geometric mean or number mode
radius, and the geometric standard deviation. Complicated multi-modal
aerosol distributions can often be represented as the sum of several
lognormal distributions. Since the parcel model describes the evolution
of a discrete aerosol size spectrum, the aerosol population can be broken into :math:`n` bins,
and the continuous aerosol size distribution approximated by taking the
number concentration and size at the geometric mean value in each bin,
such that the discrete approximation to the aerosol size distribution
becomes
.. math:: n_{N,i}(r_i) = \sum\limits_{i=1}^n\frac{N_i}{\sqrt{2\pi}\ln\sigma_g}\exp\left(-\frac{\ln^2(r_i/\mu_g)}{2\ln^2\sigma_g}\right)
If no bounds on the size range of :math:`r_i` is specified, then the
model pre-computes :math:`n` equally-spaced bins over the logarithm of
:math:`r`, and covers the size range :math:`\mu_g/10\sigma_g` to
:math:`10\sigma_g\mu_g`. It is typical to run the model with :math:`200`
size bins per aerosol mode. Neither this model nor similar ones exhibit
much sensitivity towards the density of the sectional discretization .
Typically, a single value for hygroscopicity, :math:`\kappa` is
prescribed for each aerosol mode. However, the model tracks a
hygroscopicity parameter for each individual size bin, so size-dependent
aerosol composition can be incorporated into the aerosol population.
This representation of the aerosol population is similar to the external
mixing state assumption. An advantage to using this representation is
that complex mixing states can be represented by adding various size
bins, each with their own number concentration and hygroscopicity.
.. topic:: Reference
References
----------
.. [Nenes2001] Nenes, A., Ghan, S., Abdul-Razzak, H., Chuang, P. Y. & Seinfeld, J. H. Kinetic limitations on cloud droplet formation and impact on cloud albedo. Tellus 53, 133–149 (2001).
.. [SP2006] Seinfeld, J. H. & Pandis, S. N. Atmospheric Chemistry and Physics: From Air Pollution to Climate Change. Atmos. Chem. Phys. 2nd, 1203 (Wiley, 2006).
.. [Rothenberg2016] Daniel Rothenberg and Chien Wang, 2016: Metamodeling of Droplet Activation for Global Climate Models. *J. Atmos. Sci.*, **73**, 1255–1272. doi: http://dx.doi.org/10.1175/JAS-D-15-0223.1
.. [PK2007] Petters, M. D. & Kreidenweis, S. M. A single parameter representation of hygroscopic growth and cloud condensation nucleus activity. Atmos. Chem. Phys. 7, 1961–1971 (2007).
.. [Ghan2011] Ghan, S. J. et al. Droplet nucleation: Physically-based parameterizations and comparative evaluation. J. Adv. Model. Earth Syst. 3, M10001 (2011).
:mod:`{{module}}`.{{objname}}
{{ underline }}==============
.. currentmodule:: {{ module }}
.. autoclass:: {{ objname }}
:members:
:undoc-members:
{% block methods %}
.. automethod:: __init__
{% endblock %}
#cython: embedsignature:True
#cython: profile=False
#cython: boundscheck=False
#cython: cdivision=True
# (embedsignature adds doc-strings accessible by Sphinx)
""" Parcel model derivative calculations implemented in Cython.
"""
from cython.parallel cimport prange, parallel
from libc.math cimport exp, sqrt
import numpy as np
cimport numpy as np
cimport cython
#cimport openmp
import constants as c
## Define double DTYPE
# ``np.float`` was a deprecated alias for the builtin ``float`` (a C double)
# and was removed in NumPy 1.24; ``np.float64`` is the explicit equivalent.
DTYPE = np.float64
## Thermodynamic/chemistry constants
# Module-level typed C copies of the values in ``constants``; storing them
# as cdef globals lets the nogil helper functions below read them without
# holding the GIL.
cdef:
    double Mw = c.Mw #: Molecular weight of water, kg/mol
    double Ma = c.Ma #: Molecular weight of dry air, kg/mol
    double R = c.R #: Universal gas constant, J/(mol K)
    double rho_w = c.rho_w #: Density of water, kg/m**3
    double Rd = c.Rd #: Gas constant for dry air, J/(kg K)
    double Rv = c.Rv #: Gas constant for water vapor, J/(kg K)
    double g = c.g #: Gravitational constant, m/s**2
    double Dv = c.Dv #: Diffusivity of water vapor in air, m^2/s
    double ac = c.ac #: Condensation Constant
    double Ka = c.Ka #: Thermal conductivity of air, J/m/s/K
    double at = c.at #: thermal accomodation coefficient
    double L = c.L #: Latent heat of condensation, J/kg
    double Cp = c.Cp #: Specific heat of dry air at constant pressure, J/kg
    double PI = 3.14159265358979323846264338328 #: Pi, constant
    int N_STATE_VARS = c.N_STATE_VARS #: number of thermodynamic state variables
## Auxiliary, single-value calculations with GIL released for derivative
## calculations
cdef inline double sigma_w(double T) nogil:
    """See :func:`pyrcel.micro.sigma_w` for full documentation
    """
    # Linear fit of the surface tension of water about 0 deg C.
    cdef double T_celsius = T - 273.15
    return 0.0761 - 1.55e-4 * T_celsius
cdef inline double ka(double T, double r, double rho) nogil:
    """See :func:`pyrcel.micro.ka` for full documentation
    """
    cdef double k_cont, correction
    # Continuum-regime thermal conductivity of air, linear in T.
    k_cont = 1e-3*(4.39 + 0.071*T)
    # Size-dependent non-continuum correction factor for small droplets.
    correction = 1.0 + (k_cont/(at*r*rho*Cp))*sqrt((2*PI*Ma)/(R*T))
    return k_cont/correction
cdef inline double dv(double T, double r, double P, double accom) nogil:
    """See :func:`pyrcel.micro.dv` for full documentation
    """
    cdef double d_cont, correction, P_atm
    # NOTE(review): comment claimed Pa -> atm, but 1 atm = 101325 Pa, so the
    # exact factor would be 1/101325 ~= 9.869e-6, not 1.01325e-5 — confirm
    # the intended unit/factor upstream before changing (kept as-is here).
    P_atm = P*1.01325e-5
    # Continuum-regime diffusivity of water vapor in air.
    d_cont = 1e-4*(0.211/P_atm)*((T/273.)**1.94)
    # Size-dependent non-continuum correction for small droplets; ``accom``
    # is the condensation (mass accommodation) coefficient.
    correction = 1.0 + (d_cont/(accom*r))*sqrt((2*PI*Mw)/(R*T))
    return d_cont/correction
cdef inline double es(double T):
    """See :func:`pyrcel.micro.es` for full documentation
    """
    # Magnus-type saturation vapor pressure; callers pass ``T`` in Celsius
    # (see the ``es(T_c)`` call in :func:`der`).
    cdef double exponent = 17.67*T/(T + 243.5)
    return 611.2*exp(exponent)
cdef double Seq(double r, double r_dry, double T, double kappa) nogil:
    """See :func:`pyrcel.micro.Seq` for full documentation.
    """
    cdef double curvature, solute
    # Kelvin (curvature) exponent of kappa-Kohler theory.
    curvature = (2.*Mw*sigma_w(T))/(R*T*rho_w*r)
    # Solute (Raoult) term; unity when the particle is insoluble (kappa == 0).
    solute = 1.0
    if kappa > 0.0:
        solute = (r**3 - (r_dry**3))/(r**3 - (r_dry**3)*(1.-kappa))
    return exp(curvature)*solute - 1.0
## RHS Derivative callback function
def der(double[::1] y, double t,
        int nr, double[::1] r_drys, double[::1] Nis, double V, double[::1] kappas,
        double accom):
    """See :func:`pyrcel.parcel.der` for full documentation
    """
    # Unpack the thermodynamic state vector; the ordering (z, P, T, wv, wc,
    # wi, S, radii...) is fixed by the parcel model driver.
    cdef double z = y[0]
    cdef double P = y[1]
    cdef double T = y[2]
    cdef double wv = y[3]
    cdef double wc = y[4]
    cdef double wi = y[5]
    cdef double S = y[6]
    # Remaining entries are the wetted radii of the ``nr`` aerosol size bins.
    cdef double[::1] rs = y[N_STATE_VARS:]

    cdef double T_c = T-273.15 # convert temperature to Celsius
    cdef double pv_sat = es(T_c) # saturation vapor pressure
    cdef double wv_sat = wv/(S+1.) # saturation mixing ratio
    cdef double Tv = (1.+0.61*wv)*T # virtual temperature
    cdef double e = (1. + S)*pv_sat # water vapor pressure
    ## Compute air densities from current state
    cdef double rho_air = P/Rd/Tv
    cdef double rho_air_dry = (P-e)/Rd/T #: TODO - port to parcel.py

    ## Begin computing tendencies
    cdef:
        double dwc_dt, dwv_dt, dwi_dt, dT_dt, dS_dt
        double[::1] drs_dt, x

    # NOTE(review): dP_dt and dz_dt are left untyped (Python objects),
    # unlike the other tendencies above — presumably harmless; confirm.
    dP_dt = -1.*rho_air*g*V
    dwc_dt = 0.0
    drs_dt = np.empty(shape=(nr), dtype="d")

    cdef: # variables set in parallel loop
        unsigned int i
        double G_a, G_b, G
        double r, r_dry, delta_S, kappa, dr_dt, Ni
        double dv_r, ka_r, P_atm, A, B, Seq_r

    # Per-bin condensational growth; ``dwc_dt += ...`` inside prange is
    # treated by Cython as a parallel reduction.
    for i in prange(nr, nogil=True):#, schedule='static'):#, num_threads=40):
    #for i in range(nr):
        r = rs[i]
        r_dry = r_drys[i]
        kappa = kappas[i]
        Ni = Nis[i]

        ## Non-continuum diffusivity/thermal conductivity of air near
        ## near particle
        dv_r = dv(T, r, P, accom)
        ka_r = ka(T, r, rho_air)

        ## Condensation coefficient
        G_a = (rho_w*R*T)/(pv_sat*dv_r*Mw)
        G_b = (L*rho_w*((L*Mw/(R*T))-1.))/(ka_r*T)
        G = 1./(G_a + G_b)

        ## Difference between ambient and particle equilibrium supersaturation
        Seq_r = Seq(r, r_dry, T, kappa)
        delta_S = S - Seq_r

        ## Size and liquid water tendencies
        dr_dt = (G/r)*delta_S
        dwc_dt += Ni*r*r*dr_dt # Contribution to liq. water tendency due to growth
        drs_dt[i] = dr_dt
    dwc_dt *= (4.*PI*rho_w/rho_air_dry) # Hydrated aerosol size -> water mass
    # use rho_air_dry for mixing ratio definition consistency

    # No freezing implemented yet
    dwi_dt = 0.0

    ## MASS BALANCE CONSTRAINT
    dwv_dt = -1.*(dwc_dt + dwi_dt)

    ## ADIABATIC COOLING
    dT_dt = -g*V/Cp - L*dwv_dt/Cp

    dz_dt = V

    ''' Alternative methods for calculation supersaturation tendency
    # Used eq 12.28 from Pruppacher and Klett in stead of (9) from Nenes et al, 2001
    #cdef double S_a, S_b, S_c, dS_dt
    #cdef double S_b_old, S_c_old, dS_dt_old
    #S_a = (S+1.0)

    ## NENES (2001)
    #S_b_old = dT_dt*wv_sat*(17.67*243.5)/((243.5+(Tv-273.15))**2.)
    #S_c_old = (rho_air*g*V)*(wv_sat/P)*((0.622*L)/(Cp*Tv) - 1.0)
    #dS_dt_old = (1./wv_sat)*(dwv_dt - S_a*(S_b_old-S_c_old))

    ## PRUPPACHER (PK 1997)
    #S_b = dT_dt*0.622*L/(Rd*T**2.)
    #S_c = g*V/(Rd*T)
    #dS_dt = P*dwv_dt/(0.622*es(T-273.15)) - S_a*(S_b + S_c)

    ## SEINFELD (SP 1998)
    #S_b = L*Mw*dT_dt/(R*T**2.)
    #S_c = V*g*Ma/(R*T)
    #dS_dt = dwv_dt*(Ma*P)/(Mw*es(T-273.15)) - S_a*(S_b + S_c)
    '''

    ## GHAN (2011)
    # alpha/gamma correspond to the weakly T,P-dependent coefficients in
    # dS/dt = alpha*V - gamma*dwc/dt (see docs, eq. dSdt).
    cdef double alpha, gamma
    alpha = (g*Mw*L)/(Cp*R*(T**2)) - (g*Ma)/(R*T)
    gamma = (P*Ma)/(Mw*pv_sat) + (Mw*L*L)/(Cp*R*T*T)
    dS_dt = alpha*V - gamma*dwc_dt

    # Assemble the full derivative vector in the same ordering as ``y``.
    x = np.empty(shape=(nr+N_STATE_VARS), dtype='d')
    x[0] = dz_dt
    x[1] = dP_dt
    x[2] = dT_dt
    x[3] = dwv_dt
    x[4] = dwc_dt
    x[5] = dwi_dt
    x[6] = dS_dt
    x[N_STATE_VARS:] = drs_dt[:]

    return x

Sorry, the diff of this file is not supported yet

#! /usr/bin/env python
try:
from setuptools import setup
HAS_SETUPTOOLS = True
except ImportError:
from distutils.core import setup
# Must have Cython!!
from Cython.Distutils import build_ext
from distutils.extension import Extension
import numpy
import os
import warnings
from textwrap import dedent
# Package version; bump these for releases.
MAJOR, MINOR, MICRO = 1, 3, 1
DEV = False

VERSION = "{}.{}.{}".format(MAJOR, MINOR, MICRO)

# Correct versioning with git info if DEV
if DEV:
    import subprocess

    # The --match pattern must NOT carry literal quotes: subprocess passes
    # argv entries verbatim (no shell), so "'v[0-9]*'" would never match a
    # real tag like v1.3.1.
    pipe = subprocess.Popen(
        ['git', "describe", "--always", "--match", "v[0-9]*"],
        stdout=subprocess.PIPE
    )
    so, err = pipe.communicate()

    if pipe.returncode != 0:
        # no git or something wrong with git (not in dir?)
        warnings.warn("WARNING: Couldn't identify git revision, using generic version string")
        VERSION += ".dev"
    else:
        git_rev = so.strip()
        git_rev = git_rev.decode('ascii')  # necessary for Python >= 3
        VERSION += ".dev-" + git_rev      # format() was a no-op on a str
# Cython extension modules compiled at build time.
extensions = [
    # Parcel-model ODE right-hand side implemented in Cython for speed.
    Extension("pyrcel.parcel_aux",
              ["pyrcel/parcel_aux.pyx"],
              include_dirs=[numpy.get_include(), ],
              # OpenMP flags intentionally disabled; uncomment to enable
              # the parallel (prange) build of the derivative loop.
              #extra_compile_args=['-fopenmp', ],
              #extra_link_args=['-fopenmp', ],
              ),
    # Disabled experimental GSL-backed binned extension.
    #Extension("parcel_aux_bin", ["parcel_aux_bin.pyx"],
    #          libraries=cgsl.get_libraries(),
    #          library_dirs=[cgsl.get_library_dir(), ],
    #          include_dirs=[numpy.get_include(), cgsl.get_cython_include_dir()],
    #          ),
]
def _write_version_file():
    # Persist the resolved version string to ``pyrcel/version.py`` so the
    # installed package can report its own version at runtime.
    target = os.path.join(os.path.dirname(__file__), 'pyrcel', 'version.py')
    template = dedent("""
    __version__ = '{}'
    """)
    # Overwrite any stale version file from a previous build.
    with open(target, 'w') as fh:
        fh.write(template.format(VERSION))
# Write version and install
_write_version_file()

setup(
    name = "pyrcel",
    author = "Daniel Rothenberg",
    author_email = "darothen@mit.edu",
    maintainer = "Daniel Rothenberg",
    maintainer_email = "darothen@mit.edu",
    description = "pyrcel: 0D adiabatic cloud parcel model",
    long_description = """
This code implements a relatively simple, adiabatic cloud parcel model for studying aerosol
activation. It allows the user to perform and iterate parcel model experiments in a simple
fashion through the use of an object-oriented implementation. Furthermore, interfaces for
several numerical solvers are built into the model so that users can choose whatever scheme
they would like to run with the model.
    """,
    license = "New BSD (3-clause)",
    url = "https://github.com/darothen/pyrcel",
    version = VERSION,
    download_url = "https://github.com/darothen/pyrcel",
    # TODO: Update install requirements and corresponding documentation
    install_requires = ['numpy', 'scipy', 'pandas', 'future'],
    packages = ["pyrcel", ],
    package_data = {
        'pyrcel': ['data/std_atm.csv', ],
    },
    scripts = [
        'scripts/run_parcel',
    ],
    ext_modules = extensions,
    cmdclass = {'build_ext': build_ext},
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Science/Research',
        # Must agree with ``license`` above ("New BSD (3-clause)"); the
        # previous 'MIT License' classifier contradicted it.
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: Unix',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        # NOTE(review): 'Fortran' looks stale for a Cython-based codebase —
        # confirm before removing.
        'Programming Language :: Fortran',
        'Topic :: Scientific/Engineering :: Atmospheric Science',
    ],
)