Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 5 additions & 3 deletions docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -93,10 +93,12 @@ For instance, the following command, using the included sample data will grid
one minute of data (3 GLM files) on the ABI fixed grid in the CONUS sector at 2
km resolution. These images will overlay precisely on the ABI cloud tops, and
will have parallax with respect to ground for all the same reasons ABI does.
Output will be placed in the current directory in a new 2018/Jul/02 directory
created by default.

.. code-block:: bash

python make_GLM_grids.py -o /path/to/output/
python make_GLM_grids.py
--fixed_grid --split_events \
--goes_position east --goes_sector conus \
--dx=2.0 --dy=2.0 \
Expand All @@ -109,7 +111,7 @@ If you don't need the whole conus sector, you can instead plot on a mesoscale do

.. code-block:: bash

python make_GLM_grids.py -o /path/to/output/
python make_GLM_grids.py
--fixed_grid --split_events \
--goes_position east --goes_sector meso \
--dx=2.0 --dy=2.0 \
Expand All @@ -122,7 +124,7 @@ Finally, if you want a fully custom grid size, you can omit the ``--goes_sector`

.. code-block:: bash

python make_GLM_grids.py -o /path/to/output/
python make_GLM_grids.py
--fixed_grid --split_events \
--goes_position east \
--dx=2.0 --dy=2.0 --width="1000.0" --height="500.0" \
Expand Down
33 changes: 20 additions & 13 deletions examples/grid/make_GLM_grids.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,28 @@
Grid spacing is regular in latitude and longitude with the grid box
being sized to match the requested dx, dy at the center of the grid.

Within the output directory, a year/month/day directory will be created,
e.g., 2017/Jul/04/, and within that directory the grid files will be created.

Therefore, this script can be used to process multiple days and they will
be written to a standardized directory structure.
By default, data will be saved to the current directory according to the
standard GOES imagery naming convention. This behavior can be fully controlled
by adjusting the -o argument.
"""

output_help = """Specify the output path and filename using a configurable path
template. -o ./{dataset_name} (the default) will generate files in the current
directory using the standard GOES imagery naming convention, including a .nc
extension. Any intermediate directories will be created as needed. All allowed
names in the template are listed in the docs for
glmtools.io.imagery.write_goes_imagery. For example, this script can be used to
process multiple days, with output written to a standardized directory
structure, by specifying a path like so: -o
{start_time:%%Y/%%b/%%d}/{dataset_name}"""

def create_parser():
parser = argparse.ArgumentParser(description=parse_desc)
parser.add_argument(dest='filenames',metavar='filename', nargs='*')
parser.add_argument('-o', '--output_dir', metavar='directory',
required=True, dest='outdir', action='store', )
parser.add_argument('-o', '--output_path',
metavar='filename template including path',
required=False, dest='outdir', action='store',
default='./{dataset_name}', help=output_help)
parser.add_argument('--ctr_lat', metavar='latitude', required=False,
dest='ctr_lat', action='store', type=float,
help='center latitude')
Expand Down Expand Up @@ -180,13 +191,9 @@ def grid_setup(args):
end_time = max(filename_ends)

date = datetime(start_time.year, start_time.month, start_time.day)
# grid_dir = os.path.join('/data/LCFA-production/', 'grid_test')
# outpath = grid_dir+'/20%s' %(date.strftime('%y/%b/%d'))
outpath = os.path.join(args.outdir, '20%s' %(date.strftime('%y/%b/%d')))
if os.path.exists(outpath) == False:
os.makedirs(outpath)
# subprocess.call(['chmod', 'a+w', outpath, grid_dir+'/20%s' %(date.strftime('%y/%b')), grid_dir+'/20%s' %(date.strftime('%y'))])

outpath = args.outdir

if args.fixed_grid:
proj_name = 'geos'

Expand Down
23 changes: 13 additions & 10 deletions examples/plot_glm_test_data.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,8 @@
"duration = timedelta(0, 60*5)\n",
"enddate = startdate+duration\n",
" \n",
"cmd = \"python ./grid/make_GLM_grids.py -o {1}\"\n",
"cmd = \"python /Users/ebruning/code/glmtools/examples/grid/make_GLM_grids.py\"\n",
"cmd += \" -o {1}/{{start_time:%Y/%b/%d}}/{{dataset_name}}\"\n",
"cmd += \" --fixed_grid --split_events --float_output\"\n",
"cmd += \" --goes_position=east --goes_sector=meso\"\n",
"cmd += \" --ctr_lat=33.5 --ctr_lon=-101.5 --dx=2.0 --dy=2.0\"\n",
Expand Down Expand Up @@ -147,9 +148,7 @@
{
"cell_type": "code",
"execution_count": 29,
"metadata": {
"scrolled": false
},
"metadata": {},
"outputs": [
{
"data": {
Expand Down Expand Up @@ -1138,7 +1137,10 @@
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": true
"collapsed": true,
"jupyter": {
"outputs_hidden": true
}
},
"outputs": [],
"source": [
Expand All @@ -1161,7 +1163,10 @@
"cell_type": "code",
"execution_count": 11,
"metadata": {
"collapsed": true
"collapsed": true,
"jupyter": {
"outputs_hidden": true
}
},
"outputs": [],
"source": [
Expand Down Expand Up @@ -1199,9 +1204,7 @@
{
"cell_type": "code",
"execution_count": 41,
"metadata": {
"scrolled": false
},
"metadata": {},
"outputs": [
{
"data": {
Expand Down Expand Up @@ -2130,5 +2133,5 @@
}
},
"nbformat": 4,
"nbformat_minor": 1
"nbformat_minor": 4
}
17 changes: 16 additions & 1 deletion glmtools/grid/make_grids.py
Original file line number Diff line number Diff line change
Expand Up @@ -596,7 +596,7 @@ def output_setup(self, *args, **kwargs):
self.divide_grids[2]=0
self.divide_grids[6]=4

def write_grids(self, outpath = '', output_writer = None,
def write_grids(self, outpath = './{dataset_name}', output_writer = None,
output_writer_3d = None,
output_filename_prefix = None, output_kwargs={}):

Expand Down Expand Up @@ -777,6 +777,15 @@ def grid_GLM_flashes(GLM_filenames, start_time, end_time, **kwargs):
Passed to GLMGridder.write_grids:
outpath, output_writer, output_writer_3d,
output_kwargs, output_filename_prefix
For GLMlutGridder.write_grids, all of the above are passed,
but only output_kwargs and outpath are used.
outpath can be a template string; defaults to './{dataset_name}'
Available named arguments in the template are:
dataset_name: standard GOES imagery format, includes '.nc'. Looks like
OR_GLM-L2-GLMM1-M3_G16_s20181830432000_e20181830433000_c20200461148520.nc
start_time, end_time: datetimes that can be used with strftime syntax, e.g.
'./{start_time:%y/%b/%d}/GLM_{start_time:%Y%m%d_%H%M%S}.nc'

Remaining keyword arguments are passed to the GLMGridder on initialization.
"""

Expand Down Expand Up @@ -876,6 +885,8 @@ def proc_each_grid(subgrid, start_time=None, end_time=None, GLM_filenames=None):
process_flash_kwargs_ij['clip_events'] = mesh
log.debug(("XEDGE", subgridij, xedge.min(), xedge.max(), xedge.shape))
log.debug(("YEDGE", subgridij, yedge.min(), yedge.max(), yedge.shape))

saved_first_file_metadata = False
for filename in GLM_filenames:
# Could create a cache of GLM objects by filename here.
log.info("Processing {0}".format(filename))
Expand All @@ -890,6 +901,10 @@ def proc_each_grid(subgrid, start_time=None, end_time=None, GLM_filenames=None):
# xarray 0.12.1 (and others?) throws an error when trying to load
# data from an empty dimension.
glm.dataset.load()

if not saved_first_file_metadata:
gridder.first_file_attrs = dict(glm.dataset.attrs)
saved_first_file_metadata = True
gridder.process_flashes(glm, **process_flash_kwargs_ij)
else:
log.info("Skipping {0} - number of events is 0".format(filename))
Expand Down
47 changes: 34 additions & 13 deletions glmtools/io/imagery.py
Original file line number Diff line number Diff line change
Expand Up @@ -366,7 +366,7 @@ def new_goes_imagery_dataset(x, y, nadir_lon):
# Dimensions
dims = ('y', 'x')

scene_id = infer_scene_from_dataset(x, y)
scene_id, nominal_resolution = infer_scene_from_dataset(x, y)
log.debug("Span of grid implies scene is {0}".format(scene_id))

# Coordinate data: x, y
Expand All @@ -390,7 +390,7 @@ def new_goes_imagery_dataset(x, y, nadir_lon):
d.x.attrs.update(xc.attrs)
d.y.attrs.update(yc.attrs)

return d, scene_id
return d, scene_id, nominal_resolution

def xy_to_2D_lonlat(gridder, x_coord, y_coord):
self = gridder
Expand All @@ -417,11 +417,19 @@ def pairwise(iterable):

def infer_scene_from_dataset(x, y):
"Infer whether the scene matches one of the GOES-R fixed grid domains."
from lmatools.grid.fixed import goesr_conus, goesr_meso, goesr_full
from lmatools.grid.fixed import goesr_conus, goesr_meso, goesr_full, goesr_resolutions
rtol = 1.0e-2

# Try to match up the actual spacing in microradians with known resolutions
dx = np.abs(x[1]-x[0])
resolution = '{:d}microradian at nadir'.format(int(np.round(dx*1e6)))
for km, microrad in goesr_resolutions.items():
if np.allclose(microrad, dx, rtol=rtol):
resolution = km.replace('.0', '') + ' at nadir'

spanEW = x.max() - x.min()
spanNS = y.max() - y.min()
log.debug("Inferring scene from spans x={0}, y={1}".format(spanEW, spanNS))
rtol = 1.0e-2
if (np.allclose(spanEW, goesr_full['spanEW'], rtol=rtol) &
np.allclose(spanNS, goesr_full['spanNS'], rtol=rtol) ):
scene_id = "FULL"
Expand All @@ -433,17 +441,23 @@ def infer_scene_from_dataset(x, y):
scene_id = "MESO1"
else:
scene_id = "OTHER"
return scene_id
return scene_id, resolution

def write_goes_imagery(gridder, outpath='.', pad=None, scale_and_offset=True):
def write_goes_imagery(gridder, outpath='./{dataset_name}', pad=None, scale_and_offset=True):
""" pad is a tuple of x_slice, y_slice: slice objects used to index the
zeroth and first dimensions, respectively, of the grids in gridder.

scale_and_offset controls whether to write variables as scaled ints.
if False, floating point grids will be written.


outpath can be a template string; defaults to './{dataset_name}'
Available named arguments in the template are:
dataset_name: standard GOES imagery format, includes '.nc'. Looks like
OR_GLM-L2-GLMM1-M3_G16_s20181830432000_e20181830433000_c20200461148520.nc
start_time, end_time: datetimes that can be used with strftime syntax, e.g.
'./{start_time:%y/%b/%d}/GLM_{start_time:%Y%m%d_%H%M%S}.nc'
Intermediate directories will be created to match outpath.
"""
# output_filename_prefix="LMA", **output_kwargs):
self = gridder
if pad is not None:
x_slice, y_slice = pad
Expand Down Expand Up @@ -479,19 +493,26 @@ def write_goes_imagery(gridder, outpath='.', pad=None, scale_and_offset=True):
# upper left in the GOES-R series L1b PUG (section 5.1.2.6 Product Data
# Structures). Later, to follow the image array convention will
# transpose the grids and then flipud.
dataset, scene_id = new_goes_imagery_dataset(x_coord,
dataset, scene_id, nominal_resolution = new_goes_imagery_dataset(x_coord,
np.flipud(y_coord), nadir_lon)

# Global metadata
l2lcfa_attrs = gridder.first_file_attrs

global_attrs = get_glm_global_attrs(start, end,
"G16", "GOES-East", "GLM-1", scene_id,
"2km at nadir", "ABI Mode 3", "DE", "Postprocessed", "TTU"
l2lcfa_attrs['platform_ID'], l2lcfa_attrs['orbital_slot'],
l2lcfa_attrs['instrument_ID'], scene_id,
nominal_resolution, "ABI Mode 3", "DE", "Postprocessed", "TTU"
)
dataset = dataset.assign_attrs(**global_attrs)
# log.debug("*** Checking x coordinate attrs initial")
# log.debug(dataset.x.attrs)

outfile = os.path.join(outpath, dataset.attrs['dataset_name'])

outfile = outpath.format(start_time=start, end_time=end,
dataset_name=dataset.attrs['dataset_name'])
enclosing_dir = os.path.dirname(outfile)
if os.path.exists(enclosing_dir) == False:
os.makedirs(enclosing_dir)

# Adding a new variable to the dataset below clears the coord attrs
# so hold on to them for now.
Expand Down