issue 104 #114 (Draft)
Wants to merge 4 commits into base: main
Changes from 1 commit
13 changes: 13 additions & 0 deletions src/nsidc/metgen/cli.py
@@ -21,6 +21,8 @@ def init(config):
"""Populates a configuration file based on user input."""
click.echo(metgen.banner())
config = metgen.init_config(config)
# add step here to evaluate input files?
# if netcdf: header information
click.echo(f"Initialized the metgen configuration file {config}")


@@ -67,6 +69,17 @@ def validate(config_filename, content_type):
metgen.init_logging(configuration)
metgen.validate(configuration, content_type)

@cli.command()
@click.option(
"-f",
"--file",
"data_filename",
help="Path to sample data file",
required=True,
)
def assess(data_filename):
"""Examine a sample data file for metadata completeness."""
return True

@cli.command()
@click.option(
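As an aside for reviewers, here is a minimal sketch of what assess might grow into, assuming the sample file is NetCDF and xarray is available; the attribute list is illustrative, not a project standard:

import xarray as xr

# Global attributes UMM-G generation currently relies on; illustrative only.
REQUIRED_ATTRIBUTES = ["date_modified", "time_coverage_start", "time_coverage_end"]


def assess_netcdf(netcdf_path):
    """Report which commonly needed global attributes are missing."""
    netcdf = xr.open_dataset(netcdf_path)
    missing = [attr for attr in REQUIRED_ATTRIBUTES if attr not in netcdf.attrs]
    for attr in missing:
        print(f"Missing global attribute: {attr}")
    return not missing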
7 changes: 5 additions & 2 deletions src/nsidc/metgen/config.py
@@ -31,6 +31,9 @@ class Config:
number: int
dry_run: bool

def __post_init__(self):
self.collection = None

def show(self):
# TODO: add section headings in the right spot
# (if we think we need them in the output)
@@ -43,8 +46,8 @@ def show(self):
if self.dry_run:
LOGGER.info("")
LOGGER.info(
"""Note: The dry-run option was included, so no files will be\
staged and no CNM messages published."""
"""Note: The dry-run option was included, so no files will be \
staged and no CNM messages published."""
)
LOGGER.info("")

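A note on the __post_init__ approach (an aside, not part of this diff): an attribute assigned in __post_init__ without a field declaration will not show up in dataclasses.fields(), asdict(), or the generated repr. If that matters, declaring collection as a defaulted field is an equivalent alternative; the Optional[Any] type here is an assumption:

import dataclasses
from typing import Any, Optional


@dataclasses.dataclass
class Config:
    # ... existing fields ...
    number: int
    dry_run: bool
    # A declared field participates in fields()/asdict(), unlike an
    # attribute assigned only in __post_init__.
    collection: Optional[Any] = None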
47 changes: 14 additions & 33 deletions src/nsidc/metgen/metgen.py
@@ -7,10 +7,10 @@
import os.path
import sys
import uuid
from collections.abc import Callable
from importlib.resources import open_text
from pathlib import Path
from string import Template
from typing import Callable

import jsonschema
from funcy import all, filter, partial, rcompose, take
@@ -175,31 +175,6 @@ def init_config(configuration_file):
return configuration_file


def prepare_output_dirs(configuration):
"""
Generate paths to ummg and cnm output directories.
Remove any existing UMM-G files if needed.
TODO: create local_output_dir, ummg_dir, and cnm subdir if they don't exist
"""
ummg_path = Path(configuration.local_output_dir, configuration.ummg_dir)
cnm_path = Path(configuration.local_output_dir, "cnm")

if configuration.overwrite_ummg:
scrub_json_files(ummg_path)

return (ummg_path, cnm_path)


def scrub_json_files(path):
print(f"Removing existing files in {path}")
for file_path in path.glob("*.json"):
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
except Exception as e:
print("Failed to delete %s: %s" % (file_path, e))


# -------------------------------------------------------------------
# Data structures for processing Granules and recording results
# -------------------------------------------------------------------
@@ -211,6 +186,7 @@ class Collection:

auth_id: str
version: int
data_reader: Callable[[str], dict]


@dataclasses.dataclass
@@ -255,7 +231,15 @@ def process(configuration: config.Config) -> None:
"""
Process all Granules and record the results and summary.
"""
# TODO: Do any prep actions, like mkdir, etc
# TODO:
# Do any prep actions, like mkdir, etc
# Get real collection information from CMR
# Determine data reader based on actual data file characteristics (e.g. suffix)
configuration.collection = Collection(
configuration.auth_id,
configuration.version,
netcdf_reader.extract_metadata
)

# Ordered list of operations to perform on each granule
operations = [
@@ -361,13 +345,10 @@ def null_operation(configuration: config.Config, granule: Granule) -> Granule:

def granule_collection(configuration: config.Config, granule: Granule) -> Granule:
"""
Find the Granule's Collection and add it to the Granule.
Associate the Collection with the Granule.
"""
# TODO: When we start querying CMR, refactor the pipeline to retrieve
# collection information from CMR once, then associate it with each
# granule.
return dataclasses.replace(
granule, collection=Collection(configuration.auth_id, configuration.version)
granule, collection=configuration.collection
)
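For reviewers new to dataclasses.replace: it builds a new instance with the named fields swapped and leaves the original untouched, which keeps this pipeline step side-effect free. A toy demonstration (this simplified Granule is illustrative, not the real dataclass):

import dataclasses


@dataclasses.dataclass(frozen=True)
class Granule:
    producer_granule_id: str
    collection: object = None


g1 = Granule("foo")
g2 = dataclasses.replace(g1, collection="ABCD.2")
assert g1.collection is None and g2.collection == "ABCD.2"  # g1 unchanged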


@@ -418,7 +399,7 @@ def create_ummg(configuration: config.Config, granule: Granule) -> Granule:
# }
metadata_details = {}
for data_file in granule.data_filenames:
metadata_details[data_file] = netcdf_reader.extract_metadata(data_file)
metadata_details[data_file] = granule.collection.data_reader(data_file)

# Collapse information about (possibly) multiple files into a granule summary.
summary = metadata_summary(metadata_details)
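On the TODO about choosing the data reader from file characteristics: one plausible shape is a suffix-to-reader table, sketched below. This is an assumption about the eventual design; the helper name is hypothetical, and .nc is the only suffix the project handles today.

from pathlib import Path

from nsidc.metgen import netcdf_reader

# Map file suffixes to reader functions; extend as new formats are supported.
READERS = {
    ".nc": netcdf_reader.extract_metadata,
}


def data_reader_for(data_filename: str):
    """Pick a metadata reader based on the data file's suffix."""
    suffix = Path(data_filename).suffix.lower()
    try:
        return READERS[suffix]
    except KeyError:
        raise ValueError(f"No metadata reader for {suffix} files")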
14 changes: 11 additions & 3 deletions src/nsidc/metgen/netcdf_reader.py
@@ -10,6 +10,10 @@


def extract_metadata(netcdf_path):
# provide some sort of "review" command line function to
# assess what's missing from netcdf file?
# or add to ini file generator a step that evaluates an
# example file for completeness?
"""
Read the content at netcdf_path and return a structure with temporal coverage
information, spatial coverage information, file size, and production datetime.
@@ -20,7 +24,9 @@ def extract_metadata(netcdf_path):

return {
"size_in_bytes": os.path.getsize(netcdf_path),
# TODO: handle files that have no date_modified attribute
"production_date_time": ensure_iso(netcdf.attrs["date_modified"]),
# TODO: handle files that have no time coverage attributes
"temporal": time_range(netcdf),
"geometry": {"points": json.dumps(spatial_values(netcdf))},
}
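For reference, the returned structure looks roughly like this; all values (and the exact shape of temporal) are illustrative, not taken from a real file:

example = {
    "size_in_bytes": 3984726,
    "production_date_time": "2023-04-01T00:00:00+00:00",
    "temporal": ["2023-03-01T00:00:00+00:00", "2023-03-31T23:59:59+00:00"],
    "geometry": {"points": '[{"Longitude": -100.0, "Latitude": 70.0}]'},  # JSON string
}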
@@ -51,9 +57,7 @@ def spatial_values(netcdf):
crs_4326 = CRS.from_epsg(4326)
xformer = Transformer.from_crs(data_crs, crs_4326, always_xy=True)

# Adding padding should give us values that match up to the
# netcdf.attrs.geospatial_bounds
pad = abs(float(netcdf.crs.GeoTransform.split()[1])) / 2
pad = pixel_padding(netcdf)
xdata = [x - pad if x < 0 else x + pad for x in netcdf.x.data]
ydata = [y - pad if y < 0 else y + pad for y in netcdf.y.data]

@@ -65,6 +69,10 @@
for (lon, lat) in perimeter
]

def pixel_padding(netcdf):
# Adding padding should give us values that match up to the
# netcdf.attrs.geospatial_bounds
return abs(float(netcdf.crs.GeoTransform.split()[1])) / 2

def thinned_perimeter(xdata, ydata):
"""
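A note on pixel_padding for reviewers unfamiliar with GDAL GeoTransforms: the attribute is six space-separated numbers (x_min, pixel_width, row_rotation, y_max, column_rotation, pixel_height), so element [1] is the pixel width. Half a pixel shifts cell-center x/y coordinates out to the cell edges, which is why the padded perimeter lines up with geospatial_bounds. A worked example with an illustrative 25 km grid:

# Illustrative GeoTransform: 25 km pixels anchored at (-3850000, 5850000).
geo_transform = "-3850000.0 25000.0 0.0 5850000.0 0.0 -25000.0"
pad = abs(float(geo_transform.split()[1])) / 2
assert pad == 12500.0  # half a pixel width, in projected meters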
1 change: 1 addition & 0 deletions tests/test_config.py
@@ -32,6 +32,7 @@ def expected_keys():
"checksum_type",
"number",
"dry_run",
"collection"
]
)

12 changes: 10 additions & 2 deletions tests/test_metgen.py
@@ -102,14 +102,22 @@ def test_returns_datetime_range():


def test_s3_object_path_has_no_leading_slash():
granule = metgen.Granule("foo", metgen.Collection("ABCD", 2), uuid="abcd-1234")
granule = metgen.Granule(
"foo",
metgen.Collection("ABCD", 2, "my_reader"),
uuid="abcd-1234"
)
expected = "external/ABCD/2/abcd-1234/xyzzy.bin"
assert metgen.s3_object_path(granule, "xyzzy.bin") == expected


def test_s3_url_simple_case():
staging_bucket_name = "xyzzy-bucket"
granule = metgen.Granule("foo", metgen.Collection("ABCD", 2), uuid="abcd-1234")
granule = metgen.Granule(
"foo",
metgen.Collection("ABCD", 2, "my_reader"),
uuid="abcd-1234"
)
expected = "s3://xyzzy-bucket/external/ABCD/2/abcd-1234/xyzzy.bin"
assert metgen.s3_url(staging_bucket_name, granule, "xyzzy.bin") == expected
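One observation on these fixtures: Collection types data_reader as Callable[[str], dict], but the tests pass the string "my_reader". That passes because dataclasses do not enforce type hints at runtime; a type-consistent stand-in would be a no-op callable:

metgen.Collection("ABCD", 2, lambda path: {})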
