Commit 9f264b3

remove env test. fix subdataset check

scottstanie committed Jul 7, 2023 (parent: 6a4a42b)
Showing 2 changed files with 15 additions and 42 deletions.
src/dolphin/workflows/config.py (14 additions, 9 deletions)
@@ -136,7 +136,7 @@ def _check_network_type(cls, values):
                 " `max_temporal_baseline` can be set."
             )
         if max_tb is not None:
-            values["network_type"] = InterferogramNetworkType.TEMPORAL_BASELINE
+            values["network_type"] = InterferogramNetworkType.MAX_TEMPORAL_BASELINE
             return values

         if max_bw is not None:
@@ -432,14 +432,19 @@ def _check_slc_files_exist(cls, values):
         # If they're HDF5/NetCDF files, we need to check that the subdataset exists
         if ext in [".h5", ".nc"]:
             subdataset = input_options.subdataset
-            if subdataset is None and cls._is_opera_file_list(file_list):
-                # Assume that the user forgot to set the subdataset, and set it to the
-                # default OPERA dataset name
-                logger.info(
-                    "CSLC files look like OPERA files, setting subdataset to"
-                    f" {OPERA_DATASET_NAME}."
-                )
-                subdataset = input_options.subdataset = OPERA_DATASET_NAME
+            if subdataset is None:
+                if cls._is_opera_file_list(file_list):
+                    # Assume that the user forgot to set the subdataset, and set it to the
+                    # default OPERA dataset name
+                    logger.info(
+                        "CSLC files look like OPERA files, setting subdataset to"
+                        f" {OPERA_DATASET_NAME}."
+                    )
+                    subdataset = input_options.subdataset = OPERA_DATASET_NAME
+                else:
+                    raise ValueError(
+                        "Must provide subdataset name for input HDF5 files."
+                    )

         # Coerce the file_list to a sorted list of Path objects
         file_list, _ = sort_files_by_date(file_list, file_date_fmt=date_fmt)
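In practice, this change makes config validation fail fast: HDF5/NetCDF inputs whose names do not look like OPERA CSLC products now require an explicit subdataset instead of slipping through with none set. (The first hunk also switches the assigned enum member to MAX_TEMPORAL_BASELINE, presumably matching the current member name in InterferogramNetworkType.) Below is a minimal sketch of the new behavior, assuming only the Workflow(cslc_file_list=...) call seen in the tests; the file names are made up, and the empty .h5 files exist only so validation reaches the subdataset check:

    from pathlib import Path
    import tempfile

    import pydantic
    from dolphin.workflows import config

    tmp = Path(tempfile.mkdtemp())
    # Hypothetical non-OPERA-named HDF5 inputs with no subdataset configured
    files = [tmp / "slc_20220101.h5", tmp / "slc_20220113.h5"]
    for f in files:
        f.touch()

    try:
        config.Workflow(cslc_file_list=files)
    except pydantic.ValidationError as err:
        print(err)  # expected to mention "Must provide subdataset name"

Previously the non-OPERA case fell through this branch silently; the new else clause surfaces the problem at config time rather than when the files are first opened.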
tests/test_workflows_config.py (1 addition, 33 deletions)

@@ -102,38 +102,6 @@ def test_worker_settings_defaults():
     assert ws.block_size_gb == 1.0


-def test_worker_env_defaults(monkeypatch):
-    # Change environment with monkeypatch
-    # https://docs.pytest.org/en/latest/how-to/monkeypatch.html
-    monkeypatch.setenv("dolphin_gpu_enabled", "False")
-    ws = config.WorkerSettings()
-    assert ws.gpu_enabled is False
-    monkeypatch.delenv("dolphin_gpu_enabled")
-
-    # "gpu" doesn't need the dolphin_ prefix
-    monkeypatch.setenv("gpu", "False")
-    ws = config.WorkerSettings()
-    assert ws.gpu_enabled is False
-
-    # Case shouldn't matter (since i'm not specifying that it does)
-    monkeypatch.setenv("Gpu", "False")
-    ws = config.WorkerSettings()
-    assert ws.gpu_enabled is False
-
-    # Check that we need the dolphin_ prefix
-    monkeypatch.setenv("N_WORKERS", "8")
-    ws = config.WorkerSettings()
-    assert ws.n_workers == cpu_count()  # should still be old default
-
-    monkeypatch.setenv("DOLPHIN_N_WORKERS", "8")
-    ws = config.WorkerSettings()
-    assert ws.n_workers == 8
-
-    monkeypatch.setenv("DOLPHIN_BLOCK_SIZE_GB", "4.5")
-    ws = config.WorkerSettings()
-    assert ws.block_size_gb == 4.5
-
-
 @pytest.fixture()
 def dir_with_1_slc(tmp_path, slc_file_list_nc):
     p = tmp_path / "slc"
@@ -214,7 +182,7 @@ def test_input_glob_pattern(slc_file_list_nc):
 def test_input_nc_missing_subdataset(slc_file_list_nc):
     cslc_dir = Path(slc_file_list_nc[0]).parent

-    with pytest.raises(pydantic.ValidationError, match="Must provide dataset name"):
+    with pytest.raises(pydantic.ValidationError, match="Must provide subdataset name"):
         config.Workflow(cslc_file_list=cslc_dir / "slclist.txt")


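The updated match string keeps this test aligned with the new error message in config.py. A natural companion test for the other new branch (hypothetical, not part of this commit) would assert that explicitly non-OPERA-named .h5 files also trigger the error, assuming _is_opera_file_list keys off file names:

    import pydantic
    import pytest

    from dolphin.workflows import config


    def test_non_opera_h5_requires_subdataset(tmp_path):
        # Hypothetical follow-up test: non-OPERA file names with no
        # subdataset should now raise instead of silently continuing.
        files = [tmp_path / "a_20220101.h5", tmp_path / "b_20220113.h5"]
        for f in files:
            f.touch()
        with pytest.raises(pydantic.ValidationError, match="Must provide subdataset"):
            config.Workflow(cslc_file_list=files)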
