@@ -29,10 +29,9 @@
 from xarray import Dataset as xr_Dataset


-from tests.integration.testing_helpers import UNITS_FEET_PER_SECOND
-from tests.integration.testing_helpers import UNITS_FOOT
 from tests.integration.testing_helpers import UNITS_METER
 from tests.integration.testing_helpers import UNITS_METERS_PER_SECOND
+from tests.integration.testing_helpers import UNITS_MILLISECOND
 from tests.integration.testing_helpers import UNITS_NONE
 from tests.integration.testing_helpers import UNITS_SECOND
 from tests.integration.testing_helpers import get_teapot_segy_spec
@@ -51,34 +50,24 @@
 from mdio.core import Dimension


-class PostStack3DVelocityTemplate(Seismic3DPostStackTemplate):
+class PostStack3DVelocityMetricTemplate(Seismic3DPostStackTemplate):
     """Custom template that uses 'velocity' as the default variable name instead of 'amplitude'."""

     @property
     def _default_variable_name(self) -> str:
         """Override the default variable name."""
         return "velocity"

-    def __init__(self, data_domain: str, is_metric: bool) -> None:
+    def __init__(self, data_domain: str) -> None:
         super().__init__(data_domain)
-        if is_metric:
-            self._units.update(
-                {
-                    "time": UNITS_SECOND,
-                    "cdp_x": UNITS_METER,
-                    "cdp_y": UNITS_METER,
-                    "velocity": UNITS_METERS_PER_SECOND,
-                }
-            )
-        else:
-            self._units.update(
-                {
-                    "time": UNITS_SECOND,
-                    "cdp_x": UNITS_FOOT,
-                    "cdp_y": UNITS_FOOT,
-                    "velocity": UNITS_FEET_PER_SECOND,
-                }
-            )
+        self._units.update(
+            {
+                "time": UNITS_MILLISECOND,
+                "cdp_x": UNITS_METER,
+                "cdp_y": UNITS_METER,
+                "velocity": UNITS_METERS_PER_SECOND,
+            }
+        )

     @property
     def _name(self) -> str:
@@ -95,25 +84,25 @@ def _create_empty_mdio(cls, create_headers: bool, output_path: Path, overwrite:
9584 """Create a temporary empty MDIO file for testing."""
9685 # Create the grid with the specified dimensions
9786 dims = [
98- Dimension (name = "inline" , coords = range (1 , 346 , 1 )), # 100-300 with step 1
99- Dimension (name = "crossline" , coords = range (1 , 189 , 1 )), # 1000-1600 with step 2
100- Dimension (name = "time" , coords = range (0 , 3002 , 2 )), # 0-3 seconds 4ms sample rate
87+ Dimension (name = "inline" , coords = range (1 , 346 , 1 )),
88+ Dimension (name = "crossline" , coords = range (1 , 189 , 1 )),
89+ Dimension (name = "time" , coords = range (0 , 3002 , 2 )),
10190 ]
10291
10392 # If later on, we want to export to SEG-Y, we need to provide the trace header spec.
10493 # The HeaderSpec can be either standard or customized.
10594 headers = get_teapot_segy_spec ().trace .header if create_headers else None
10695 # Create an empty MDIO v1 metric post-stack 3D time velocity dataset
10796 return create_empty (
108- mdio_template = PostStack3DVelocityTemplate (data_domain = "time" , is_metric = True ),
97+ mdio_template = PostStack3DVelocityMetricTemplate (data_domain = "time" ),
10998 dimensions = dims ,
11099 output_path = output_path ,
111100 headers = headers ,
112101 overwrite = overwrite ,
113102 )
114103
115104 @classmethod
116- def validate_teapod_dataset_metadata (cls , ds : xr_Dataset , is_velocity : bool ) -> None :
105+ def validate_teapot_dataset_metadata (cls , ds : xr_Dataset , is_velocity : bool ) -> None :
117106 """Validate the dataset metadata."""
118107 if is_velocity :
119108 assert ds .name == "PostStack3DVelocityTime"
@@ -137,7 +126,6 @@ def validate_teapod_dataset_metadata(cls, ds: xr_Dataset, is_velocity: bool) ->

         # Check that createdOn exists
         assert "createdOn" in actual_attrs_json
-        assert actual_attrs_json["createdOn"] is not None

         # Validate template attributes
         attributes = ds.attrs["attributes"]
@@ -152,7 +140,7 @@ def validate_teapod_dataset_metadata(cls, ds: xr_Dataset, is_velocity: bool) ->
         assert attributes["gatherType"] == "stacked"

     @classmethod
-    def validate_teapod_dataset_variables(
+    def validate_teapot_dataset_variables(
         cls, ds: xr_Dataset, header_dtype: np.dtype | None, is_velocity: bool
     ) -> None:
         """Validate an empty MDIO dataset structure and content."""
@@ -164,7 +152,9 @@ def validate_teapod_dataset_variables(
         validate_xr_variable(
             ds, "crossline", {"crossline": 188}, UNITS_NONE, np.int32, False, range(1, 189), get_values
         )
-        validate_xr_variable(ds, "time", {"time": 1501}, UNITS_SECOND, np.int32, False, range(0, 3002, 2), get_values)
+        validate_xr_variable(
+            ds, "time", {"time": 1501}, UNITS_MILLISECOND, np.int32, False, range(0, 3002, 2), get_values
+        )

         # Validate the non-dimensional coordinate variables (should be empty for empty dataset)
         validate_xr_variable(ds, "cdp_x", {"inline": 345, "crossline": 188}, UNITS_METER, np.float64)
@@ -183,7 +173,7 @@ def validate_teapod_dataset_variables(
         # Validate the trace mask (should be all True for empty dataset)
         validate_xr_variable(ds, "trace_mask", {"inline": 345, "crossline": 188}, UNITS_NONE, np.bool_)
         trace_mask = ds["trace_mask"].values
-        assert not np.any(trace_mask), "All traces should be marked as dead in empty dataset"
+        assert not np.any(trace_mask), "Expected all `False` values in `trace_mask` but found `True`."

         # Validate the velocity or amplitude data (should be empty)
         if is_velocity:
@@ -222,16 +212,16 @@ def mdio_no_headers(self, empty_mdio_dir: Path) -> Path:
     def test_dataset_metadata(self, mdio_with_headers: Path) -> None:
         """Test dataset metadata for empty MDIO file."""
         ds = open_mdio(mdio_with_headers)
-        self.validate_teapod_dataset_metadata(ds, is_velocity=True)
+        self.validate_teapot_dataset_metadata(ds, is_velocity=True)

     def test_variables(self, mdio_with_headers: Path, mdio_no_headers: Path) -> None:
         """Test grid validation for empty MDIO file."""
         ds = open_mdio(mdio_with_headers)
         header_dtype = get_teapot_segy_spec().trace.header.dtype
-        self.validate_teapod_dataset_variables(ds, header_dtype=header_dtype, is_velocity=True)
+        self.validate_teapot_dataset_variables(ds, header_dtype=header_dtype, is_velocity=True)

         ds = open_mdio(mdio_no_headers)
-        self.validate_teapod_dataset_variables(ds, header_dtype=None, is_velocity=True)
+        self.validate_teapot_dataset_variables(ds, header_dtype=None, is_velocity=True)

     def test_overwrite_behavior(self, empty_mdio_dir: Path) -> None:
         """Test overwrite parameter behavior in create_empty_mdio."""
@@ -258,9 +248,9 @@ def test_overwrite_behavior(self, empty_mdio_dir: Path) -> None:

         # Validate that the MDIO file can be loaded correctly using the helper function
         ds = open_mdio(empty_mdio)
-        self.validate_teapod_dataset_metadata(ds, is_velocity=True)
+        self.validate_teapot_dataset_metadata(ds, is_velocity=True)
         header_dtype = get_teapot_segy_spec().trace.header.dtype
-        self.validate_teapod_dataset_variables(ds, header_dtype=header_dtype, is_velocity=True)
+        self.validate_teapot_dataset_variables(ds, header_dtype=header_dtype, is_velocity=True)

         # Verify the garbage data was overwritten (should not exist)
         assert not garbage_file.exists(), "Garbage file should have been overwritten"
@@ -403,6 +393,6 @@ def test_create_empty_like(self, teapot_mdio_tmp: Path, empty_mdio_dir: Path) ->
         )
         assert ds is not None

-        self.validate_teapod_dataset_metadata(ds, is_velocity=False)
+        self.validate_teapot_dataset_metadata(ds, is_velocity=False)
         header_dtype = get_teapot_segy_spec().trace.header.dtype
-        self.validate_teapod_dataset_variables(ds, header_dtype=header_dtype, is_velocity=False)
+        self.validate_teapot_dataset_variables(ds, header_dtype=header_dtype, is_velocity=False)
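
For context, the renamed template is consumed the same way as before, minus the dropped is_metric flag: units are now fixed to metric, with time in milliseconds. Below is a minimal sketch of the call pattern exercised by this diff; the output path is hypothetical, and the import locations of create_empty and open_mdio are assumed to match the rest of this test module rather than confirmed here.

# Sketch only: mirrors the calls made in _create_empty_mdio above.
# Assumes PostStack3DVelocityMetricTemplate is defined as in this test module,
# and that create_empty / open_mdio are imported the same way the tests import them.
from pathlib import Path

from mdio.core import Dimension

dims = [
    Dimension(name="inline", coords=range(1, 346, 1)),
    Dimension(name="crossline", coords=range(1, 189, 1)),
    Dimension(name="time", coords=range(0, 3002, 2)),  # time axis in milliseconds (UNITS_MILLISECOND)
]

# The template constructor now takes only the data domain; units are metric by construction.
template = PostStack3DVelocityMetricTemplate(data_domain="time")

create_empty(
    mdio_template=template,
    dimensions=dims,
    output_path=Path("empty_velocity.mdio"),  # hypothetical output location
    headers=None,  # or get_teapot_segy_spec().trace.header to keep SEG-Y export possible
    overwrite=True,
)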