Merge 30.1.
@@ -116,7 +116,103 @@ class Zuverlaessigkeit:
    return Lokaltest_innere_Zuv


def aeussere_zuverlaessigkeit_EF_EP1(Lokaltest, labels, Qxx, A, P, s0_apost, unbekannten_liste, x):
    ...  # body not shown in this excerpt


def aeussere_zuverlaessigkeit_EF_EP_stabil(Lokaltest, labels, Qxx, A, P, s0_apost, unbekannten_liste, x):
    df = Lokaltest.copy()
    labels = list(labels)
    Qxx = np.asarray(Qxx, float)
    A = np.asarray(A, float)
    P = np.asarray(P, float)
    x = np.asarray(x, float).reshape(-1)
    ri = df["r_i"].astype(float).to_numpy()
    GF = df["GF_i"].astype(float).to_numpy()
    GRZW = df["GRZW_i"].astype(float).to_numpy()
    n = A.shape[0]

    # Names of the unknowns as strings for lookup
    namen_str = [str(sym) for sym in unbekannten_liste]

    # 1) Compute the influence factor EF
    EF = np.zeros(n, dtype=float)
    for i in range(n):
        nabla_l = np.zeros((n, 1))
        nabla_l[i, 0] = GRZW[i]
        nabla_x = Qxx @ (A.T @ (P @ nabla_l))
        Qxx_inv_nabla_x = np.linalg.solve(Qxx, nabla_x)
        EF2 = ((nabla_x.T @ Qxx_inv_nabla_x) / (float(s0_apost) ** 2)).item()
        EF[i] = np.sqrt(max(0, EF2))
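    # Reading of the loop above (interpretation, not stated in the commit): EF[i] is the
    # Qxx-metric length of the parameter shift nabla_x caused by a gross error of size
    # GRZW_i in observation i, normalised by s0_apost, i.e. an influence factor for the
    # external reliability.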

    # 2) Coordinate dictionary built from the parameter vector x
    coords = {}
    punkt_ids = [nm[1:] for nm in namen_str if nm.upper().startswith("X")]

    for pid in punkt_ids:
        try:
            ix = namen_str.index(f"X{pid}")
            iy = namen_str.index(f"Y{pid}")
        except ValueError:
            continue
        try:
            iz = namen_str.index(f"Z{pid}")
            z = x[iz]
        except ValueError:
            # no height unknown for this point
            z = 0.0
        coords[pid] = (x[ix], x[iy], z)

    # 3) EP and station points
    EP_m = np.full(len(labels), np.nan, dtype=float)
    standpunkte = [""] * len(labels)

    for i, lbl in enumerate(labels):
        parts = lbl.split("_")
        sp, zp = None, None

        if any(k in lbl for k in ["_SD_", "_R_", "_ZW_"]):
            if len(parts) >= 5:
                sp, zp = parts[3].strip(), parts[4].strip()
        elif "gnss" in lbl.lower():
            sp, zp = parts[-2].strip(), parts[-1].strip()
        elif "niv" in lbl.lower():
            if len(parts) >= 4:
                sp = parts[3].strip()
            else:
                sp = parts[-1].strip()

        standpunkte[i] = sp if sp is not None else ""

        # SD, GNSS, levelling: directly a displacement error
        if "_SD_" in lbl or "gnss" in lbl.lower() or "niv" in lbl.lower():
            EP_m[i] = (1.0 - ri[i]) * GF[i]
        # Angles: distance equivalent
        elif "_R_" in lbl or "_ZW_" in lbl:
            if sp in coords and zp in coords:
                X1, Y1, _ = coords[sp]
                X2, Y2, _ = coords[zp]
                s = np.sqrt((X2 - X1) ** 2 + (Y2 - Y1) ** 2)
                EP_m[i] = (1.0 - ri[i]) * (GF[i] * s)
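            # Note (interpretation): GF_i for directions/zenith angles is assumed to be an
            # angular value in radians, so that GF_i * s is a transverse offset in the
            # same length unit as the coordinates.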

    # 4) SP at the station point (2D or 1D)
    diagQ = np.diag(Qxx)
    SP_cache_mm = {}
    for sp in {s for s in standpunkte if s}:
        try:
            ix = namen_str.index(f"X{sp}")
            iy = namen_str.index(f"Y{sp}")
            SP_cache_mm[sp] = float(s0_apost) * np.sqrt(diagQ[ix] + diagQ[iy]) * 1000.0
        except ValueError:
            # If there is no horizontal position, check the height (levelling)
            try:
                iz = namen_str.index(f"Z{sp}")
                SP_cache_mm[sp] = float(s0_apost) * np.sqrt(diagQ[iz]) * 1000.0
            except ValueError:
                SP_cache_mm[sp] = 0.0
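                # Interpretation: a station with neither X/Y nor Z among the unknowns
                # (e.g. a fixed datum point) is assigned SP = 0.0 rather than NaN.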

    SP_mm = np.array([SP_cache_mm.get(sp, np.nan) for sp in standpunkte], dtype=float)

    return pd.DataFrame({
        "Beobachtung": labels, "Stand-Pkt": standpunkte, "EF": EF,
        "EP [mm]": EP_m * 1000.0, "SP [mm]": SP_mm, "EF*SP [mm]": EF * SP_mm
    })


def aeussere_zuverlaessigkeit_EF_EP(Lokaltest, labels, Qxx, A, P, s0_apost, unbekannten_liste, x):
    df = Lokaltest.copy()
    labels = list(labels)
    Qxx = np.asarray(Qxx, float)
@@ -216,99 +312,4 @@ class Zuverlaessigkeit:
        "SP [mm]": SP_mm,
        "EF*SP [mm]": EF * SP_mm,
    })
    return out


def aeussere_zuverlaessigkeit_EF_EP_stabil(Lokaltest, labels, Qxx, A, P, s0_apost, unbekannten_liste, x):
    df = Lokaltest.copy()
    labels = list(labels)
    Qxx = np.asarray(Qxx, float)
    A = np.asarray(A, float)
    P = np.asarray(P, float)
    x = np.asarray(x, float).reshape(-1)
    ri = df["r_i"].astype(float).to_numpy()
    GF = df["GF_i"].astype(float).to_numpy()
    GRZW = df["GRZW_i"].astype(float).to_numpy()
    n = A.shape[0]

    # Names of the unknowns as strings for lookup
    namen_str = [str(sym) for sym in unbekannten_liste]

    # 1) Compute the influence factor EF
    EF = np.zeros(n, dtype=float)
    for i in range(n):
        nabla_l = np.zeros((n, 1))
        nabla_l[i, 0] = GRZW[i]
        nabla_x = Qxx @ (A.T @ (P @ nabla_l))
        Qxx_inv_nabla_x = np.linalg.solve(Qxx, nabla_x)
        EF2 = ((nabla_x.T @ Qxx_inv_nabla_x) / (float(s0_apost) ** 2)).item()
        EF[i] = np.sqrt(max(0, EF2))

    # 2) Coordinate dictionary built from the parameter vector x
    coords = {}
    punkt_ids = [n[1:] for n in namen_str if n.upper().startswith("X")]

    for pid in punkt_ids:
        try:
            ix = namen_str.index(f"X{pid}")
            iy = namen_str.index(f"Y{pid}")
            iz = namen_str.index(f"Z{pid}")

            coords[pid] = (x[ix], x[iy], x[iz] if iz is not None else 0.0)
        except:
            continue

    # 3) EP and station points
    EP_m = np.full(len(labels), np.nan, dtype=float)
    standpunkte = [""] * len(labels)

    for i, lbl in enumerate(labels):
        parts = lbl.split("_")
        sp, zp = None, None

        if any(k in lbl for k in ["_SD_", "_R_", "_ZW_"]):
            if len(parts) >= 5:
                sp, zp = parts[3].strip(), parts[4].strip()
        elif "gnss" in lbl.lower():
            sp, zp = parts[-2].strip(), parts[-1].strip()
        elif "niv" in lbl.lower():
            if len(parts) >= 4:
                sp = parts[3].strip()
            else:
                sp = parts[-1].strip()

        standpunkte[i] = sp if sp is not None else ""
        one_minus_r = (1.0 - ri[i])

        # SD, GNSS, levelling: directly a displacement error
        if "_SD_" in lbl or "gnss" in lbl.lower() or "niv" in lbl.lower():
            EP_m[i] = one_minus_r * GF[i]
        # Angles: distance equivalent
        elif "_R_" in lbl or "_ZW_" in lbl:
            if sp in coords and zp in coords:
                X1, Y1, _ = coords[sp]
                X2, Y2, _ = coords[zp]
                s = np.sqrt((X2 - X1) ** 2 + (Y2 - Y1) ** 2)
                EP_m[i] = one_minus_r * (GF[i] * s)

    # 4) SP at the station point (2D or 1D)
    diagQ = np.diag(Qxx)
    SP_cache_mm = {}
    for sp in set([s for s in standpunkte if s]):
        try:
            ix = namen_str.index(f"X{sp}")
            iy = namen_str.index(f"Y{sp}")
            SP_cache_mm[sp] = float(s0_apost) * np.sqrt(diagQ[ix] + diagQ[iy]) * 1000.0
        except ValueError:
            # If there is no horizontal position, check the height (levelling)
            try:
                iz = namen_str.index(f"Z{sp}")
                SP_cache_mm[sp] = float(s0_apost) * np.sqrt(diagQ[iz]) * 1000.0
            except ValueError:
                SP_cache_mm[sp] = 0.0

    SP_mm = np.array([SP_cache_mm.get(sp, np.nan) for sp in standpunkte], dtype=float)

    return pd.DataFrame({
        "Beobachtung": labels, "Stand-Pkt": standpunkte, "EF": EF,
        "EP [mm]": EP_m * 1000.0, "SP [mm]": SP_mm, "EF*SP [mm]": EF * SP_mm
    })
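
For orientation, a minimal call sketch of aeussere_zuverlaessigkeit_EF_EP_stabil, assuming it is reachable as a plain function (adjust the call if it stays a method of Zuverlaessigkeit); the tiny 1D levelling network and all numbers are illustrative, not taken from the commit:

import numpy as np
import pandas as pd

# Two unknown heights Z100, Z200, observed by one height difference and two height observations.
unbekannten = ["Z100", "Z200"]
labels = ["dh_niv_1_100_200", "h_niv_2_100", "h_niv_3_200"]
A = np.array([[-1.0, 1.0],
              [ 1.0, 0.0],
              [ 0.0, 1.0]])
P = np.eye(3)
Qxx = np.linalg.inv(A.T @ P @ A)
x = np.array([100.000, 101.250])   # adjusted heights [m]
s0_apost = 1.0                     # a-posteriori sigma0 (illustrative)

# r_i, GF_i, GRZW_i normally come from the inner-reliability step; placeholder values here.
Lokaltest = pd.DataFrame({
    "r_i":    [0.33, 0.34, 0.34],
    "GF_i":   [0.002, 0.001, 0.001],   # [m]
    "GRZW_i": [0.008, 0.006, 0.006],   # [m]
})

tab = aeussere_zuverlaessigkeit_EF_EP_stabil(
    Lokaltest, labels, Qxx, A, P, s0_apost, unbekannten, x)
print(tab)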