Skip to content

Commit cb0dfb2

Browse files
authored
Merge pull request #47 from simonsobs/site_fixes
Site fixes
2 parents 0808af5 + c66cec7 commit cb0dfb2

File tree

417 files changed

+46467
-132
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

417 files changed

+46467
-132
lines changed

lat_alignment/alignment.py

Lines changed: 91 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,11 @@
1010
from functools import partial
1111
from importlib.resources import files
1212

13+
import matplotlib.pyplot as plt
1314
import megham.transform as mt
1415
import numpy as np
1516
import yaml
17+
from numpy.lib.arraysetops import isin
1618
from numpy.typing import NDArray
1719
from pqdm.processes import pqdm
1820

@@ -108,10 +110,16 @@ def main():
108110

109111
mode = cfg.get("mode", "panel")
110112
cfgdir = os.path.dirname(os.path.abspath(args.config))
111-
meas_file = os.path.abspath(os.path.join(cfgdir, cfg["measurement"]))
113+
meas_files = cfg["measurement"]
114+
if isinstance(meas_files, str):
115+
meas_files = [meas_files]
116+
meas_files = [os.path.abspath(os.path.join(cfgdir, meas)) for meas in meas_files]
117+
tracker_yamls = cfg.get("tracker_yaml", "")
118+
if isinstance(tracker_yamls, str):
119+
tracker_yamls = [tracker_yamls]
112120
title_str = cfg["title"]
113121
logger.info("Begining alignment %s in %s mode", title_str, mode)
114-
logger.debug("Using measurement file: %s", meas_file)
122+
logger.debug("Using measurement files: %s", meas_files)
115123

116124
dat_dir = os.path.abspath(os.path.join(cfgdir, cfg.get("data_dir", "/")))
117125
if "data_dir" in cfg:
@@ -122,7 +130,10 @@ def main():
122130
ref_path = str(files("lat_alignment.data").joinpath("reference.yaml"))
123131
with open(ref_path) as file:
124132
reference = yaml.safe_load(file)
125-
dataset = io.load_data(meas_file, **cfg.get("load", {"source": "photo"}))
133+
datasets = [
134+
io.load_data(meas_file, **cfg.get("load", {"source": "photo"}))
135+
for meas_file in meas_files
136+
]
126137
if "data_dir" in cfg:
127138
corner_path_m1 = os.path.join(dat_dir, f"primary_corners.yaml")
128139
adj_path_m1 = os.path.join(dat_dir, f"primary_adj.csv")
@@ -159,47 +170,73 @@ def main():
159170
logger.info("Aligning panels for the %s mirror", mirror)
160171

161172
# init, fit, and plot panels
162-
try:
163-
if isinstance(dataset, ds.DatasetPhotogrammetry):
164-
dataset, _ = da.align_photo(
165-
dataset, reference, True, mirror, **cfg.get("align_photo", {})
166-
)
167-
else:
168-
dataset, _ = da.align_tracker(
169-
dataset, cfg["tracker_yaml"], mirror, **cfg.get("align_tracker", {})
170-
)
171-
except Exception as e:
172-
logger.error("Failed to align to reference points, with error %s", str(e))
173-
bootstrap_from = cfg.get("bootstrap_from", "all")
174-
logger.info("Bootstrapping from %s", bootstrap_from)
175-
if isinstance(dataset, ds.DatasetPhotogrammetry):
176-
dataset, _ = da.align_photo(
177-
dataset,
178-
reference,
179-
True,
180-
bootstrap_from,
181-
**cfg.get("align_photo", {}),
182-
)
183-
else:
184-
dataset, _ = da.align_tracker(
185-
dataset,
186-
cfg["tracker_yaml"],
187-
bootstrap_from,
188-
**cfg.get("align_tracker", {}),
189-
)
190-
if bootstrap_from == "primary":
191-
points = tf.coord_transform(
192-
dataset.points, "opt_primary", f"opt_{mirror}"
193-
)
194-
elif bootstrap_from == "secondary":
195-
points = tf.coord_transform(
196-
dataset.points, "opt_secondary", f"opt_{mirror}"
173+
data_dict = {}
174+
for i, dataset in enumerate(datasets):
175+
try:
176+
if isinstance(dataset, ds.DatasetPhotogrammetry):
177+
dataset, _ = da.align_photo(
178+
dataset, reference, True, mirror, **cfg.get("align_photo", {})
179+
)
180+
else:
181+
dataset, _ = da.align_tracker(
182+
dataset,
183+
tracker_yamls[i],
184+
mirror,
185+
**cfg.get("align_tracker", {}),
186+
)
187+
except Exception as e:
188+
logger.error(
189+
"Failed to align to reference points, with error %s", str(e)
197190
)
198-
else:
199-
points = tf.coord_transform(
200-
dataset.points, "opt_global", f"opt_{mirror}"
201-
)
202-
dataset.data_dict = {l: p for l, p in zip(dataset.labels, points)}
191+
bootstrap_from = cfg.get("bootstrap_from", "all")
192+
logger.info("Bootstrapping from %s", bootstrap_from)
193+
if isinstance(dataset, ds.DatasetPhotogrammetry):
194+
dataset, _ = da.align_photo(
195+
dataset,
196+
reference,
197+
True,
198+
bootstrap_from,
199+
**cfg.get("align_photo", {}),
200+
)
201+
else:
202+
dataset, _ = da.align_tracker(
203+
dataset,
204+
tracker_yamls[i],
205+
bootstrap_from,
206+
**cfg.get("align_tracker", {}),
207+
)
208+
if bootstrap_from == "primary":
209+
points = tf.coord_transform(
210+
dataset.points, "opt_primary", f"opt_{mirror}"
211+
)
212+
elif bootstrap_from == "secondary":
213+
points = tf.coord_transform(
214+
dataset.points, "opt_secondary", f"opt_{mirror}"
215+
)
216+
else:
217+
points = tf.coord_transform(
218+
dataset.points, "opt_global", f"opt_{mirror}"
219+
)
220+
dataset.data_dict = {l: p for l, p in zip(dataset.labels, points)}
221+
ddict = {f"{l}_{i}": p for l, p in zip(dataset.labels, dataset.points)}
222+
data_dict = data_dict | ddict
223+
dataset = datasets[0].__class__(data_dict)
224+
append = ""
225+
if "sample_every" in cfg:
226+
i, j = cfg["sample_every"]
227+
ddict = {l: p for l, p in zip(dataset.labels[i::j], dataset.points[i::j])}
228+
dataset.data_dict = ddict
229+
append = f"_{i}_{j}"
230+
231+
fig = plt.figure()
232+
ax = fig.add_subplot(projection="3d")
233+
ax.scatter(
234+
dataset.targets[:, 0],
235+
dataset.targets[:, 1],
236+
dataset.targets[:, 2],
237+
marker="x",
238+
)
239+
plt.show()
203240
dataset, _ = mir.remove_cm(
204241
dataset, mirror, cfg.get("compensate", 0), **cfg.get("common_mode", {})
205242
)
@@ -229,13 +266,15 @@ def main():
229266
cfg.get("adjuster_radius", 100),
230267
)
231268
logger.info("Found measurements for %d panels", len(panels))
232-
fig = mir.plot_panels(panels, title_str, vmax=cfg.get("vmax", None))
233-
fig.savefig(os.path.join(cfgdir, f"{title_str.replace(' ', '_')}.png"))
269+
fig = mir.plot_panels(
270+
panels, title_str, vmax=cfg.get("vmax", None), use_iqr=cfg.get("iqr", False)
271+
)
272+
fig.savefig(os.path.join(cfgdir, f"{title_str.replace(' ', '_')}{append}.png"))
234273
res_all = np.vstack([panel.residuals for panel in panels])
235274
model_all = np.vstack([panel.model for panel in panels])
236275
mir_out = np.hstack([model_all, res_all])
237276
np.savetxt(
238-
os.path.join(cfgdir, f"{title_str.replace(' ', '_')}_surface.txt"),
277+
os.path.join(cfgdir, f"{title_str.replace(' ', '_')}_surface{append}.txt"),
239278
mir_out,
240279
header="x y z x_res y_res z_res",
241280
)
@@ -249,11 +288,15 @@ def main():
249288
order = np.lexsort((adjustments[:, 2], adjustments[:, 1], adjustments[:, 0]))
250289
adjustments = adjustments[order]
251290
np.savetxt(
252-
os.path.join(cfgdir, f"{title_str.replace(' ', '_')}.csv"),
291+
os.path.join(cfgdir, f"{title_str.replace(' ', '_')}{append}.csv"),
253292
adjustments,
254293
fmt=["%d", "%d", "%d"] + ["%.5f"] * 14,
255294
)
256295
elif mode == "optical":
296+
if len(datasets) > 1 or len(tracker_yamls) > 1:
297+
raise ValueError("Cannot have multiple files in optical mode")
298+
dataset = datasets[0]
299+
tracker_yaml = tracker_yamls[0]
257300
align_to = cfg["align_to"]
258301
if align_to not in ["primary", "secondary", "receiver", "bearing"]:
259302
raise ValueError(f"Invalid element specified for 'align_to': {align_to}")
@@ -264,7 +307,7 @@ def main():
264307
)
265308
else:
266309
dataset, _ = da.align_tracker(
267-
dataset, cfg["tracker_yaml"], "all", **cfg.get("align_tracker", {})
310+
dataset, tracker_yaml, "all", **cfg.get("align_tracker", {})
268311
)
269312

270313
# Load data and compute the transformation to align with the model

lat_alignment/data/reference_va.yaml

Lines changed: 0 additions & 26 deletions
This file was deleted.

lat_alignment/data_alignment.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -158,12 +158,12 @@ def align_photo(
158158
# invars = ["TARGET35", "TARGET4", "TARGET484"] #, "TARGET421"]
159159
# pts = [dataset[label] for label in invars]
160160
# print(invars)
161-
if len(ref) < 3:
161+
if len(ref) < 4:
162162
logger.warning(f"Only {len(ref)} reference points found!")
163163
logger.warning(f"Adding reference codes")
164164
pts += found_coded
165165
ref += ref_coded
166-
if len(ref) < 3:
166+
if len(ref) < 4:
167167
raise ValueError(
168168
f"Only {len(ref)} reference points found including codes! Can't align!"
169169
)

lat_alignment/io.py

Lines changed: 41 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -52,11 +52,29 @@ def _load_tracker_yaml(path: str):
5252
return DatasetReference(data)
5353

5454

55-
def _load_tracker_txt(path: str):
56-
data = np.genfromtxt(path, usecols=(3, 4, 5), skip_header=1, dtype=str)
55+
def _load_tracker_txt(path: str, group_dist=0.02, group_thresh=0.02):
56+
data = np.genfromtxt(
57+
path, usecols=(3, 4, 5), skip_header=1, dtype=str, delimiter="\t"
58+
)
5759
data = np.char.replace(data, ",", "").astype(float)
5860

59-
data = {f"TARGET_{i}": dat for i, dat in enumerate(data)}
61+
to_kill = []
62+
if group_dist > 0 and group_thresh > 0:
63+
done = []
64+
edm = make_edm(data[:, :2])
65+
np.fill_diagonal(edm, np.nan)
66+
for i in range(len(edm)):
67+
if i in to_kill or i in done:
68+
continue
69+
group_idx = np.hstack(([i], np.where(edm[i] <= group_dist)[0]))
70+
done += group_idx.tolist()
71+
if len(group_idx) == 1:
72+
continue
73+
zs = data[group_idx, 2]
74+
bad_zs = np.abs(zs - np.median(zs)) > group_thresh
75+
to_kill += group_idx[bad_zs].tolist()
76+
logger.info("\tFound and removed %d bad group points", len(to_kill))
77+
data = {f"TARGET_{i}": dat for i, dat in enumerate(data) if i not in to_kill}
6078

6179
return Dataset(data)
6280

@@ -68,7 +86,7 @@ def _load_tracker_csv(path: str):
6886
)
6987

7088

71-
def load_tracker(path: str) -> Dataset:
89+
def load_tracker(path: str, group_dist=0.02, group_thresh=0.02) -> Dataset:
7290
"""
7391
Load laser tracker data.
7492
TODO: This interface needs to be unified with `load_photo` so all code can use either datatype interchangeably
@@ -78,6 +96,14 @@ def load_tracker(path: str) -> Dataset:
7896
path : str
7997
The path to the laser tracker data.
8098
The type of data will be inferred from the extension.
99+
group_dist : float, default: 0.02
100+
Distance between points in xy needed to group them for cuts.
101+
Only used for `.txt` files.
102+
Set to 0 to disable.
103+
group_thresh : float, default: 0.02
104+
Difference in z between point and the median z for a group to cut at.
105+
Only used for `.txt` files.
106+
Set to 0 to disable.
81107
82108
Returns
83109
-------
@@ -90,7 +116,7 @@ def load_tracker(path: str) -> Dataset:
90116
if ext == ".yaml":
91117
return _load_tracker_yaml(path)
92118
elif ext == ".txt":
93-
return _load_tracker_txt(path)
119+
return _load_tracker_txt(path, group_dist, group_thresh)
94120
elif ext == ".csv":
95121
return _load_tracker_csv(path)
96122
raise ValueError(f"Invalid tracker data with extension {ext}")
@@ -152,12 +178,20 @@ def load_photo(
152178
to_kill += [labels[trg_msk][i]]
153179
msk = ~np.isin(labels, to_kill)
154180
logger.info("\tFound and removed %d doubles", len(to_kill))
155-
labels, coords = labels[msk], coords[msk]
181+
labels, coords, err = labels[msk], coords[msk], err[msk]
156182

157183
if plot:
158184
fig = plt.figure()
159185
ax = fig.add_subplot(projection="3d")
160-
ax.scatter(coords[:, 0], coords[:, 1], coords[:, 2], marker="x")
186+
p = ax.scatter(
187+
coords[:, 0],
188+
coords[:, 1],
189+
coords[:, 2],
190+
marker="x",
191+
c=err,
192+
vmax=np.percentile(err, 90),
193+
)
194+
fig.colorbar(p)
161195
plt.show()
162196

163197
data = {label: coord for label, coord in zip(labels, coords)}

0 commit comments

Comments
 (0)