merge 02.2.

2026-02-02 20:01:37 +01:00
parent 774acc3854
commit 4e7c55500b
11 changed files with 62593 additions and 2121 deletions

@@ -117,199 +117,193 @@ class Zuverlaessigkeit:
    def aeussere_zuverlaessigkeit(
        Lokaltest, labels, Qxx, A, P, s0_apost, unbekannten_liste, x,
        angle_units="rad",
        ep_use_abs=True,
        exclude_prefixes=("lA_",),
    ):
        df = Lokaltest.copy()
        labels = [str(l) for l in list(labels)]
        Qxx = np.asarray(Qxx, float)
        A = np.asarray(A, float)
        P = np.asarray(P, float)
        x = np.asarray(x, float).reshape(-1)
        namen_str = [str(sym) for sym in unbekannten_liste]
        n = A.shape[0]
        if len(labels) != n:
            raise ValueError(f"len(labels)={len(labels)} does not match A.shape[0]={n}.")
        if len(df) != n:
            raise ValueError(f"Lokaltest has {len(df)} rows, A has {n} observations.")
        # Filter out pseudo-observations
        keep = np.ones(n, dtype=bool)
        if exclude_prefixes:
            for i, lbl in enumerate(labels):
                if any(lbl.startswith(pref) for pref in exclude_prefixes):
                    keep[i] = False
        # Trim everything consistently (important: A and P as well!)
        df = df.loc[keep].reset_index(drop=True)
        labels = [lbl for (lbl, k) in zip(labels, keep) if k]
        A = A[keep, :]
        P = P[np.ix_(keep, keep)]
        # new n after filtering
        n = A.shape[0]
        # Observation-wise data from the local test table
        ri = df["r_i"].astype(float).to_numpy()
        GF = df["GF_i"].astype(float).to_numpy()
        GRZW = df["GRZW_i"].astype(float).to_numpy()
        s0 = float(s0_apost)
        def to_rad(val):
            if angle_units == "rad":
                return val
            if angle_units == "gon":
                return val * (np.pi / 200.0)
            if angle_units == "deg":
                return val * (np.pi / 180.0)
            raise ValueError("angle_units must be 'rad', 'gon' or 'deg'.")
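        # Worked micro-example for the conversion above: with angle_units="gon",
        # a value of 0.001 gon becomes 0.001 * pi/200 ≈ 1.57e-5 rad; multiplied by
        # the station-target distance this yields a lateral displacement in metres.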
        # Point coordinates from x (needed for the distance equivalent of angle EPs)
        coords = {}
        punkt_ids = sorted({name[1:] for name in namen_str
                            if name[:1].upper() in ("X", "Y", "Z") and len(name) > 1})
        for pid in punkt_ids:
            try:
                ix = namen_str.index(f"X{pid}")
                iy = namen_str.index(f"Y{pid}")
                iz = namen_str.index(f"Z{pid}")
                coords[pid] = (x[ix], x[iy], x[iz])
            except ValueError:
                continue
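        # Note: points without a complete X/Y/Z triple among the unknowns are skipped
        # here, so angle observations to or from such points keep NaN for their EP,
        # EF and SP values further below.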
        # Station / target point per observation, parsed from the label
        standpunkte = [""] * n
        zielpunkte = [""] * n
        for i, lbl in enumerate(labels):
            parts = lbl.split("_")
            sp, zp = None, None
            # Total station: ID_SD_GRP_SP_ZP / ID_R_GRP_SP_ZP / ID_ZW_GRP_SP_ZP
            if any(k in lbl for k in ["_SD_", "_R_", "_ZW_"]):
                if len(parts) >= 5:
                    sp, zp = parts[3].strip(), parts[4].strip()
            # GNSS: *_gnssbx_SP_ZP etc.
            elif "gnss" in lbl.lower():
                if len(parts) >= 2:
                    sp, zp = parts[-2].strip(), parts[-1].strip()
            # Levelling
            elif "niv" in lbl.lower():
                if len(parts) >= 4:
                    sp = parts[3].strip()
                    if len(parts) >= 5:
                        zp = parts[4].strip()
                else:
                    sp = parts[-1].strip()
            standpunkte[i] = sp or ""
            zielpunkte[i] = zp or ""
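        # Illustrative label forms for the parsing above: "12_SD_1_100_200" yields
        # station "100" and target "200" (parts[3] / parts[4]); a GNSS label such as
        # "l_gnssb1_100_200" yields the last two parts, "100" and "200".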
        # EP: non-controllable share of a gross error,
        # (1 - r_i) * GF_i and (1 - r_i) * GRZW_i respectively
        EP_GF = (1.0 - ri) * GF
        EP_grzw = (1.0 - ri) * GRZW
        if ep_use_abs:
            EP_GF = np.abs(EP_GF)
            EP_grzw = np.abs(EP_grzw)
        # EP per observation in metres (angles are converted to a lateral displacement below)
        EP_hat_m = np.full(n, np.nan, float)
        EP_grzw_m = np.full(n, np.nan, float)
        for i, lbl in enumerate(labels):
            sp = standpunkte[i]
            zp = zielpunkte[i]
            is_angle = ("_R_" in lbl) or ("_ZW_" in lbl)
            if not is_angle:
                # distances, GNSS, levelling: EP directly in the unit of the observation
                EP_hat_m[i] = EP_GF[i]
                EP_grzw_m[i] = EP_grzw[i]
                continue
            # angles (R / ZW): lateral displacement = angle [rad] * 3D distance station-target
            if sp in coords and zp in coords:
                X1, Y1, Z1 = coords[sp]
                X2, Y2, Z2 = coords[zp]
                s = np.sqrt((X2 - X1) ** 2 + (Y2 - Y1) ** 2 + (Z2 - Z1) ** 2)
                EP_hat_m[i] = to_rad(EP_GF[i]) * s
                EP_grzw_m[i] = to_rad(EP_grzw[i]) * s
        # index helper: 3x3 block (X, Y, Z) of the unknowns belonging to one point
        def idx_xyz(pid):
            return [
                namen_str.index(f"X{pid}"),
                namen_str.index(f"Y{pid}"),
                namen_str.index(f"Z{pid}"),
            ]

        # local EF + local SP (3D)
        EF = np.full(n, np.nan, float)
        SP_loc_m = np.full(n, np.nan, float)
        EFSP_loc_m = np.full(n, np.nan, float)
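        # The loop below shifts each observation by its marginally detectable error
        # GRZW_i and propagates it into the unknowns:
        #     ∇x_i = Qxx · Aᵀ · P · ∇l_i,   EF_i² = ∇x_i,locᵀ · Q_loc⁻¹ · ∇x_i,loc / s0²
        # restricted to the X/Y/Z blocks of the station and target point of
        # observation i (hence "local" EF and SP).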
        for i in range(n):
            sp = standpunkte[i]
            zp = zielpunkte[i]
            blocks = []
            idx = []
            try:
                if sp:
                    b = idx_xyz(sp)
                    blocks.append(b)
                    idx += b
                if zp:
                    b = idx_xyz(zp)
                    blocks.append(b)
                    idx += b
            except ValueError:
                continue
            if not blocks:
                continue
            idx = list(dict.fromkeys(idx))  # unique, order preserved
            # Δx_i caused by the marginally detectable gross error (GRZW)
            dl = np.zeros((n, 1))
            dl[i, 0] = GRZW[i]
            dx = Qxx @ (A.T @ (P @ dl))
            dx_loc = dx[idx, :]
            Q_loc = Qxx[np.ix_(idx, idx)]
            # local EF
            EF2 = (dx_loc.T @ np.linalg.solve(Q_loc, dx_loc)).item() / (s0 ** 2)
            EF[i] = np.sqrt(max(0.0, EF2))
            # local SP (3D): largest trace of the 3x3 point blocks
            tr_list = [np.trace(Qxx[np.ix_(b, b)]) for b in blocks]
            if not tr_list:
                continue
            sigmaPmax_loc = s0 * np.sqrt(max(tr_list))
            SP_loc_m[i] = sigmaPmax_loc
            EFSP_loc_m[i] = EF[i] * sigmaPmax_loc
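        # EF is dimensionless; multiplying it by the local point standard deviation
        # gives a displacement in mm, reported below as "EF*SP_loc_3D [mm]". Large
        # values flag observations whose undetected gross errors would distort the
        # neighbouring points the most.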
        ausgabe_zuv = pd.DataFrame({
            "Beobachtung": labels,
            "Stand-Pkt": standpunkte,
            "Ziel-Pkt": zielpunkte,
            "r_i": ri,
            "EP_GF [mm]": EP_hat_m * 1000.0,
            "EP_grzw [mm]": EP_grzw_m * 1000.0,
            "EF": EF,
            "SP_loc_3D [mm]": SP_loc_m * 1000.0,
            "EF*SP_loc_3D [mm]": EFSP_loc_m * 1000.0,
        })
        return ausgabe_zuv
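    # Usage sketch (illustrative; all names below are placeholders, assuming the
    # adjustment has already produced A, P, Qxx, x, s0_apost and a local-test
    # table with the columns "r_i", "GF_i" and "GRZW_i"):
    #
    #   tab = Zuverlaessigkeit.aeussere_zuverlaessigkeit(
    #       Lokaltest=lokaltest_df,
    #       labels=beobachtungs_labels,
    #       Qxx=Qxx, A=A, P=P,
    #       s0_apost=s0_apost,
    #       unbekannten_liste=unbekannten_liste,
    #       x=x,
    #       angle_units="gon",            # unit of GF_i / GRZW_i for R and ZW
    #       exclude_prefixes=("lA_",),    # drop pseudo-observations
    #   )
    #   print(tab.sort_values("EF*SP_loc_3D [mm]", ascending=False).head(10))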