zusammenfügen 13.1.
This commit is contained in:
@@ -131,14 +131,14 @@ class Berechnungen:
|
||||
beobachtsgruppeID_aktuell = beobachtung_tachymeter[3]
|
||||
if beobachtsgruppeID_aktuell == beobachtsgruppeID_vorher:
|
||||
richtung = float(self.Richtung(Azimut, orientierung))
|
||||
liste_azimut_richtungen.append((beobachtsgruppeID_aktuell, standpunkt, zielpunkt, Azimut, richtung, Zenitwinkel, schraegstrecke))
|
||||
liste_azimut_richtungen.append((beobachtsgruppeID_aktuell, standpunkt, zielpunkt, Azimut, richtung, Zenitwinkel, schraegstrecke, orientierung))
|
||||
|
||||
else:
|
||||
orientierung = Azimut
|
||||
dict_orientierungen[beobachtsgruppeID_aktuell] = orientierung
|
||||
|
||||
richtung = float(self.Richtung(Azimut, orientierung))
|
||||
liste_azimut_richtungen.append((beobachtsgruppeID_aktuell, standpunkt, zielpunkt, Azimut, richtung, Zenitwinkel, schraegstrecke))
|
||||
liste_azimut_richtungen.append((beobachtsgruppeID_aktuell, standpunkt, zielpunkt, Azimut, richtung, Zenitwinkel, schraegstrecke, orientierung))
|
||||
|
||||
beobachtsgruppeID_vorher = beobachtsgruppeID_aktuell
|
||||
return liste_azimut_richtungen, dict_orientierungen
|
||||
|
||||
49738
Campusnetz.ipynb
49738
Campusnetz.ipynb
File diff suppressed because it is too large
Load Diff
20
Datenbank.py
20
Datenbank.py
@@ -154,7 +154,7 @@ class Datenbankzugriff:
|
||||
if beobachtungsart == "Tachymeter_Richtung" or beobachtungsart == "Tachymeter_Zenitwinkel" :
|
||||
stabw_apriori_konstant = Einheitenumrechnung.mgon_to_rad_Decimal(stabw_apriori_konstant)
|
||||
|
||||
if beobachtungsart == "Tachymeter_Strecke":
|
||||
if beobachtungsart == "Tachymeter_Strecke" or beobachtungsart == "Geometrisches_Nivellement":
|
||||
stabw_apriori_konstant = Einheitenumrechnung.mm_to_m(stabw_apriori_konstant)
|
||||
|
||||
if isinstance(stabw_apriori_konstant, Decimal):
|
||||
@@ -303,7 +303,7 @@ class Datenbankzugriff:
|
||||
for hfp in liste_normalhoehe_hfp:
|
||||
if str(hfp[0]) in liste_punktnummern_in_db:
|
||||
cursor.execute("UPDATE Netzpunkte SET normalhoehe_hfp = ? WHERE punktnummer = ?",
|
||||
(hfp[1], hfp[0])
|
||||
(hfp[3], hfp[0])
|
||||
)
|
||||
ausgaben.append(f"Der HFP {hfp[0]} wurde aktualisiert.")
|
||||
else:
|
||||
@@ -414,6 +414,14 @@ class Datenbankzugriff:
|
||||
con.close()
|
||||
return liste_beobachtungen
|
||||
|
||||
def get_beobachtungen_nivellement(self):
    """Return all complete levelling observations from the database.

    Reads rows from the ``Beobachtungen`` table where all three levelling
    fields (height difference, distance, number of instrument setups) are
    present; rows with any NULL levelling field are excluded.

    Returns:
        list[tuple]: ``(beobachtungenID, punktnummer_sp, punktnummer_zp,
        niv_dh, niv_strecke, niv_anz_standpkte)`` tuples.
    """
    con = sqlite3.connect(self.pfad_datenbank)
    try:
        cursor = con.cursor()
        # Constant query: the former f-string prefix served no purpose.
        liste_beobachtungen = cursor.execute(
            "SELECT beobachtungenID, punktnummer_sp, punktnummer_zp, "
            "niv_dh, niv_strecke, niv_anz_standpkte "
            "FROM Beobachtungen "
            "WHERE niv_dh IS NOT NULL "
            "AND niv_strecke IS NOT NULL "
            "AND niv_anz_standpkte IS NOT NULL"
        ).fetchall()
        cursor.close()
    finally:
        # Always release the connection, even if the query raises.
        con.close()
    return liste_beobachtungen
|
||||
|
||||
def get_datumskoordinate(self):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
@@ -481,3 +489,11 @@ class Datenbankzugriff:
|
||||
con.close()
|
||||
return liste_gnss_beobachtungen
|
||||
|
||||
def get_nivellement_beobachtungen_punktnummern(self):
    """Return the IDs and point numbers of all complete levelling observations.

    Companion to ``get_beobachtungen_nivellement`` — same WHERE filter
    (all three levelling fields non-NULL), but only the identifying
    columns are selected.

    Returns:
        list[tuple]: ``(beobachtungenID, punktnummer_sp, punktnummer_zp)``
        tuples.
    """
    con = sqlite3.connect(self.pfad_datenbank)
    try:
        cursor = con.cursor()
        # Constant query: the former f-string prefix served no purpose.
        liste_nivellement_beobachtungen = cursor.execute(
            "SELECT beobachtungenID, punktnummer_sp, punktnummer_zp "
            "FROM Beobachtungen "
            "WHERE niv_dh IS NOT NULL "
            "AND niv_strecke IS NOT NULL "
            "AND niv_anz_standpkte IS NOT NULL"
        ).fetchall()
        cursor.close()
    finally:
        # Always release the connection, even if the query raises.
        con.close()
    return liste_nivellement_beobachtungen
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import sympy as sp
|
||||
import numpy as np
|
||||
from typing import Iterable, List, Sequence, Tuple, Optional
|
||||
|
||||
|
||||
@@ -106,50 +107,13 @@ class Datumsfestlegung:
|
||||
sol = K.LUsolve(rhs)
|
||||
return sol[:u, :]
|
||||
|
||||
|
||||
|
||||
@staticmethod
def weiches_datum(
    A: sp.Matrix,
    dl: sp.Matrix,
    Q_ll: sp.Matrix,
    x0: sp.Matrix,
    anschluss_indices: Sequence[int],
    anschluss_werte: sp.Matrix,
    Sigma_AA: Optional[sp.Matrix] = None,
) -> Tuple[sp.Matrix, sp.Matrix, sp.Matrix]:
    """Extend an adjustment system with soft datum (connection-point) conditions.

    Appends one pseudo-observation row per connection index: the row is a
    unit row selecting that unknown, its reduced observation is the given
    connection value minus the approximate value, and its cofactor block
    is Sigma_AA (identity if not supplied).

    Args:
        A: (n×u) design matrix of the original observations.
        dl: (n×1) reduced observation vector.
        Q_ll: (n×n) cofactor matrix of the observations.
        x0: (u×1) vector of approximate unknowns.
        anschluss_indices: indices (into the unknowns) of the connection points.
        anschluss_werte: (m×1) target values for those unknowns.
        Sigma_AA: optional (m×m) cofactor matrix of the conditions; defaults
            to the identity.

    Returns:
        (A_ext, dl_ext, Q_ext): the vertically extended design matrix and
        observation vector, and the block-diagonally extended cofactor matrix.

    Raises:
        ValueError: on any dimension mismatch between the inputs.
        IndexError: if a connection index lies outside [0, u-1].
    """
    # --- dimension checks on the original system ---
    if dl.cols != 1 or x0.cols != 1:
        raise ValueError("dl und x0 müssen Spaltenvektoren sein.")
    if A.rows != dl.rows:
        raise ValueError("A.rows muss dl.rows entsprechen.")
    if A.cols != x0.rows:
        raise ValueError("A.cols muss x0.rows entsprechen.")
    if Q_ll.rows != Q_ll.cols or Q_ll.rows != A.rows:
        raise ValueError("Q_ll muss (n×n) sein und zu A.rows passen.")

    u = A.cols
    idx = [int(i) for i in anschluss_indices]
    m = len(idx)

    # --- checks on the condition inputs ---
    if anschluss_werte.cols != 1 or anschluss_werte.rows != m:
        raise ValueError("anschluss_werte muss (m×1) sein.")
    if Sigma_AA is None:
        # Default: uncorrelated, unit-weight conditions.
        Sigma_AA = sp.eye(m)
    if Sigma_AA.rows != m or Sigma_AA.cols != m:
        raise ValueError("Sigma_AA muss (m×m) sein.")

    # Selection matrix: row r picks unknown idx[r].
    A_A = sp.zeros(m, u)
    for r, j in enumerate(idx):
        if not (0 <= j < u):
            raise IndexError(f"Anschluss-Index {j} außerhalb [0,{u-1}]")
        A_A[r, j] = 1

    # Reduced condition observations: target value minus approximate value.
    x0_A = sp.Matrix([[x0[j, 0]] for j in idx])
    dl_A = anschluss_werte - x0_A

    # Stack the condition rows below the original system.
    A_ext = A.col_join(A_A)
    dl_ext = dl.col_join(dl_A)

    # Block-diagonal cofactor matrix: observations and conditions
    # are treated as uncorrelated.
    Q_ext = sp.zeros(Q_ll.rows + m, Q_ll.cols + m)
    Q_ext[:Q_ll.rows, :Q_ll.cols] = Q_ll
    Q_ext[Q_ll.rows:, Q_ll.cols:] = Sigma_AA

    return A_ext, dl_ext, Q_ext
|
||||
def weiches_datum(Q_ll: np.ndarray, Q_AA: np.ndarray) -> np.ndarray:
    """Build the block-diagonal extended cofactor matrix for a soft datum.

    Q_ll occupies the upper-left corner and Q_AA the lower-right; the
    off-diagonal blocks are zero (observations and datum conditions are
    treated as uncorrelated).

    Raises:
        ValueError: if either input is not a square 2-D matrix.
    """
    if Q_ll.ndim != 2 or Q_ll.shape[0] != Q_ll.shape[1]:
        raise ValueError("Q_ll muss quadratisch sein.")
    if Q_AA.ndim != 2 or Q_AA.shape[0] != Q_AA.shape[1]:
        raise ValueError("Q_AA muss quadratisch sein.")

    n = Q_ll.shape[0]
    m = Q_AA.shape[0]
    # Preallocate and fill the two diagonal blocks in place.
    Q_ext = np.zeros((n + m, n + m), dtype=float)
    Q_ext[:n, :n] = Q_ll
    Q_ext[n:, n:] = Q_AA
    return Q_ext
|
||||
@@ -4,15 +4,18 @@ from Export import Export
|
||||
from Berechnungen import Berechnungen
|
||||
import numpy as np
|
||||
import importlib
|
||||
from Koordinatentransformationen import Transformationen
|
||||
|
||||
|
||||
|
||||
class FunktionalesModell:
|
||||
def __init__(self, pfad_datenbank, a, b):
|
||||
def __init__(self, pfad_datenbank, a, b, pfad_tif_quasigeoidundolation = None):
|
||||
self.pfad_datenbank = pfad_datenbank
|
||||
self.a = a
|
||||
self.b = b
|
||||
self.berechnungen = Berechnungen(self.a, self.b)
|
||||
self.trafos = Transformationen(pfad_datenbank)
|
||||
self.pfad_tif_quasigeoidundolation = pfad_tif_quasigeoidundolation
|
||||
self.substitutionen_dict = self.dict_substitutionen_uebergeordnetes_system()
|
||||
self.dict_punkt_symbole = {}
|
||||
self.liste_symbole_lambdify = sorted(self.substitutionen_dict.keys(), key=lambda s: str(s))
|
||||
@@ -21,14 +24,18 @@ class FunktionalesModell:
|
||||
self.func_u0 = None
|
||||
self.liste_beobachtungsvektor_symbolisch = None
|
||||
|
||||
|
||||
def jacobi_matrix_symbolisch(self, datumsfestlegung = None, liste_unbekannte_datumsfestlegung = None):
|
||||
#liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung", "tachymeter_zenitwinkel"]
|
||||
|
||||
liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung", "tachymeter_zenitwinkel", "gnss_basislinien"]
|
||||
liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung", "tachymeter_zenitwinkel", "gnss_basislinien", "geometrisches_nivellement"]
|
||||
#liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung", "tachymeter_zenitwinkel",
|
||||
# "gnss_basislinien"]
|
||||
db_zugriff = Datenbankzugriff(self.pfad_datenbank)
|
||||
|
||||
liste_beobachtungen_rohdaten_gnssbasislinien = []
|
||||
liste_beobachtungen_rohdaten_tachymeter = []
|
||||
liste_beobachtungen_rohdaten_nivellement = []
|
||||
liste_punktnummern =[]
|
||||
|
||||
liste_orientierungsunbekannte = []
|
||||
@@ -65,6 +72,20 @@ class FunktionalesModell:
|
||||
if zielpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(zielpunkt)
|
||||
|
||||
if beobachtungsart == "geometrisches_nivellement":
|
||||
liste_id_standpunkt_zielpunkt = db_zugriff.get_nivellement_beobachtungen_punktnummern()
|
||||
|
||||
for beobachtungenID, standpunkt, zielpunkt in liste_id_standpunkt_zielpunkt:
|
||||
standpunkt = str(standpunkt).strip()
|
||||
zielpunkt = str(zielpunkt).strip()
|
||||
liste_beobachtungen_rohdaten_nivellement.append(
|
||||
(beobachtungsart, beobachtungenID, standpunkt, zielpunkt))
|
||||
|
||||
if standpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(standpunkt)
|
||||
if zielpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(zielpunkt)
|
||||
|
||||
|
||||
|
||||
#if liste_beobachtungen_rohdaten_tachymeter == []:
|
||||
@@ -100,6 +121,10 @@ class FunktionalesModell:
|
||||
liste_A_gnssbasislinien_zeilen = []
|
||||
liste_zeilenbeschriftungen_gnssbasislinien = []
|
||||
|
||||
liste_beobachtungsgleichungen_nivellement = []
|
||||
liste_A_nivellement_zeilen = []
|
||||
liste_zeilenbeschriftungen_nivellement = []
|
||||
|
||||
if liste_beobachtungen_rohdaten_tachymeter != []:
|
||||
for beobachtungsart, beobachtungenID, beobachtungsgruppeID, standpunkt, zielpunkt in liste_beobachtungen_rohdaten_tachymeter:
|
||||
X_sp, Y_sp, Z_sp = self.dict_punkt_symbole[standpunkt]
|
||||
@@ -181,7 +206,7 @@ class FunktionalesModell:
|
||||
zeile_A_Matrix.append(0)
|
||||
|
||||
liste_A_zenitwinkel_zeilen.append(zeile_A_Matrix)
|
||||
liste_zeilenbeschriftungen_richtung.append(
|
||||
liste_zeilenbeschriftungen_zenitwinkel.append(
|
||||
f"{beobachtungenID}_ZW_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}"
|
||||
)
|
||||
|
||||
@@ -204,6 +229,39 @@ class FunktionalesModell:
|
||||
liste_zeilenbeschriftungen_gnssbasislinien.append(
|
||||
f"{beobachtungenID}_gnssbz_{standpunkt}_{zielpunkt}")
|
||||
|
||||
if liste_beobachtungen_rohdaten_nivellement != []:
|
||||
for beobachtungsart, beobachtungenID, standpunkt, zielpunkt in liste_beobachtungen_rohdaten_nivellement:
|
||||
X_sp, Y_sp, Z_sp = self.dict_punkt_symbole[standpunkt]
|
||||
X_zp, Y_zp, Z_zp = self.dict_punkt_symbole[zielpunkt]
|
||||
B_sp, L_sp = sp.symbols(f"B{standpunkt} L{standpunkt}")
|
||||
B_zp, L_zp = sp.symbols(f"B{zielpunkt} L{zielpunkt}")
|
||||
|
||||
if beobachtungsart == "geometrisches_nivellement":
|
||||
|
||||
d_r_dX_zp = sp.cos(B_zp) * sp.cos(L_zp)
|
||||
d_r_dX_sp = -sp.cos(B_sp) * sp.cos(L_sp)
|
||||
d_r_dY_zp = sp.cos(B_zp) * sp.sin(L_zp)
|
||||
d_r_dY_sp = -sp.cos(B_sp) * sp.sin(L_sp)
|
||||
d_r_dZ_zp = sp.sin(B_zp)
|
||||
d_r_dZ_sp = -sp.sin(B_sp)
|
||||
|
||||
zeile_A_Matrix = []
|
||||
for punkt in liste_punktnummern:
|
||||
if punkt == standpunkt:
|
||||
zeile_A_Matrix.extend([d_r_dX_sp, d_r_dY_sp, d_r_dZ_sp])
|
||||
elif punkt == zielpunkt:
|
||||
zeile_A_Matrix.extend([d_r_dX_zp, d_r_dY_zp, d_r_dZ_zp])
|
||||
else:
|
||||
zeile_A_Matrix.extend([0, 0, 0])
|
||||
|
||||
for orientierung in liste_orientierungsunbekannte:
|
||||
zeile_A_Matrix.append(0)
|
||||
|
||||
liste_A_nivellement_zeilen.append(zeile_A_Matrix)
|
||||
liste_zeilenbeschriftungen_nivellement.append(
|
||||
f"{beobachtungenID}_niv_{standpunkt}_{zielpunkt}"
|
||||
)
|
||||
|
||||
if liste_beobachtungsgleichungen_distanz:
|
||||
f_matrix_dist = sp.Matrix(liste_beobachtungsgleichungen_distanz)
|
||||
unbekanntenvektor = sp.Matrix(liste_unbekannte)
|
||||
@@ -228,6 +286,15 @@ class FunktionalesModell:
|
||||
else:
|
||||
A_gnssbasislinien = None
|
||||
|
||||
if liste_A_nivellement_zeilen:
|
||||
#f_matrix_nivellement = sp.Matrix(liste_beobachtungsgleichungen_nivellement)
|
||||
#unbekanntenvektor = sp.Matrix(liste_unbekannte)
|
||||
#A_nivellement = f_matrix_nivellement.jacobian(unbekanntenvektor)
|
||||
A_nivellement = sp.Matrix(liste_A_nivellement_zeilen)
|
||||
else:
|
||||
A_nivellement = None
|
||||
|
||||
|
||||
A_gesamt = None
|
||||
liste_zeilenbeschriftungen_gesamt = []
|
||||
|
||||
@@ -256,6 +323,13 @@ class FunktionalesModell:
|
||||
A_gesamt = A_gesamt.col_join(A_gnssbasislinien)
|
||||
liste_zeilenbeschriftungen_gesamt.extend(liste_zeilenbeschriftungen_gnssbasislinien)
|
||||
|
||||
if A_nivellement is not None:
|
||||
if A_gesamt is None:
|
||||
A_gesamt = A_nivellement
|
||||
else:
|
||||
A_gesamt = A_gesamt.col_join(A_nivellement)
|
||||
liste_zeilenbeschriftungen_gesamt.extend(liste_zeilenbeschriftungen_nivellement)
|
||||
|
||||
if A_gesamt is None:
|
||||
return None
|
||||
|
||||
@@ -451,6 +525,18 @@ class FunktionalesModell:
|
||||
if beobachtungsart == "gnssbz":
|
||||
liste_beobachtungsgleichungen.append(dZ)
|
||||
|
||||
if aufgeteilt[1] == "niv":
|
||||
beobachtungsart = aufgeteilt[1]
|
||||
standpunkt = str(aufgeteilt[2]).strip()
|
||||
zielpunkt = str(aufgeteilt[3]).strip()
|
||||
|
||||
nh_sp = sp.Symbol(f"NH{standpunkt}")
|
||||
nh_zp = sp.Symbol(f"NH{zielpunkt}")
|
||||
|
||||
niv_sp_zp = nh_zp - nh_sp
|
||||
|
||||
liste_beobachtungsgleichungen.append(niv_sp_zp)
|
||||
|
||||
beobachtungsvektor_naeherung_symbolisch = sp.Matrix(liste_beobachtungsgleichungen)
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Beobachtungsvektor_Näherung_Symbolisch.csv", [""],
|
||||
liste_beobachtungsvektor_symbolisch, beobachtungsvektor_naeherung_symbolisch, "Beobachtungsvektor")
|
||||
@@ -570,8 +656,14 @@ class FunktionalesModell:
|
||||
|
||||
liste_beobachtungen_tachymeter = db_zugriff.get_beobachtungen_from_beobachtungenid()
|
||||
liste_beobachtungen_gnssbasislinien = db_zugriff.get_beobachtungen_gnssbasislinien()
|
||||
liste_azimut_richtungen, dict_orientierungen = self.berechnungen.berechnung_richtung_azimut_zenitwinkel(self.pfad_datenbank, dict_koordinaten)
|
||||
dict_koordinaten_B_L = self.berechnungen.geometrische_breite_laenge(dict_koordinaten)
|
||||
liste_beobachtungen_nivellemente = db_zugriff.get_beobachtungen_nivellement()
|
||||
liste_azimut_richtungen, dict_orientierungen = berechnungen.berechnung_richtung_azimut_zenitwinkel(self.pfad_datenbank, dict_koordinaten)
|
||||
dict_koordinaten_xyz_kopie = {pn: [v[0], v[1], v[2]] for pn, v in dict_koordinaten.items()}
|
||||
dict_koordinaten_B_L = berechnungen.geometrische_breite_laenge(dict_koordinaten_xyz_kopie)
|
||||
|
||||
dict_koordinaten_utm = self.trafos.ecef_to_utm(
|
||||
dict_koordinaten,
|
||||
self.pfad_tif_quasigeoidundolation)
|
||||
|
||||
substitutionen = {}
|
||||
|
||||
@@ -589,7 +681,7 @@ class FunktionalesModell:
|
||||
# O_sym = sp.symbols(f"O_{beobachtungsgruppeID}")
|
||||
# substitutionen[O_sym] = float(orientierung)
|
||||
|
||||
for beobachtungsgruppeID, standpunkt, zielpunkt, azimut, richtung, zenitwinkel, schraegstrecke in liste_azimut_richtungen:
|
||||
for beobachtungsgruppeID, standpunkt, zielpunkt, azimut, richtung, zenitwinkel, schraegstrecke, orientierung in liste_azimut_richtungen:
|
||||
richtung_sym = sp.symbols(f"richtung_berechnet_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
substitutionen[richtung_sym] = float(richtung)
|
||||
|
||||
@@ -602,6 +694,10 @@ class FunktionalesModell:
|
||||
schraegstrecke_sym = sp.symbols(f"strecke_berechnet_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
substitutionen[schraegstrecke_sym] = float(schraegstrecke)
|
||||
|
||||
for punktnummer, koordinaten_utm in dict_koordinaten_utm.items():
|
||||
normalhoehe_sym = sp.symbols(f"NH{punktnummer}")
|
||||
substitutionen[normalhoehe_sym] = float(koordinaten_utm[2])
|
||||
|
||||
|
||||
|
||||
for standpunkt, zielpunkt, beobachtungenID, beobachtungsgruppeID, tachymeter_richtung, tachymeter_zenitwinkel, tachymeter_distanz in liste_beobachtungen_tachymeter:
|
||||
@@ -615,7 +711,7 @@ class FunktionalesModell:
|
||||
substitutionen[alpha] = float(tachymeter_richtung)
|
||||
substitutionen[zw] = float(tachymeter_zenitwinkel)
|
||||
substitutionen[s] = float(tachymeter_distanz)
|
||||
substitutionen[sp.Symbol(f"O{beobachtungsgruppeID}")] = 0.0
|
||||
#substitutionen[sp.Symbol(f"O{beobachtungsgruppeID}")] = 0.0
|
||||
|
||||
for beobachtungenID, punktnummer_sp, punktnummer_zp, gnss_bx, gnss_by, gnss_bz, gnss_s0, gnss_cxx, gnss_cxy, gnss_cxz, gnss_cyy, gnss_cyz, gnss_czz in liste_beobachtungen_gnssbasislinien:
|
||||
beobachtungenID = str(beobachtungenID).strip()
|
||||
@@ -633,6 +729,18 @@ class FunktionalesModell:
|
||||
substitutionen[by] = float(gnss_by)
|
||||
substitutionen[bz] = float(gnss_bz)
|
||||
|
||||
for beobachtungenID, punktnummer_sp, punktnummer_zp, niv_dh, niv_strecke, niv_anz_standpkte in liste_beobachtungen_nivellemente:
|
||||
beobachtungenID = str(beobachtungenID).strip()
|
||||
punktnummer_sp = str(punktnummer_sp).strip()
|
||||
punktnummer_zp = str(punktnummer_zp).strip()
|
||||
|
||||
niv = sp.symbols(f"{beobachtungenID}_niv_{punktnummer_sp}_{punktnummer_zp}")
|
||||
|
||||
if niv_dh is None:
|
||||
continue
|
||||
|
||||
substitutionen[niv] = float(niv_dh)
|
||||
|
||||
if unbekanntenvektor_aus_iteration is not None:
|
||||
dict_O = self.unbekanntenvektor_numerisch_to_dict_orientierungen(
|
||||
self.liste_unbekanntenvektor_symbolisch,
|
||||
@@ -641,10 +749,14 @@ class FunktionalesModell:
|
||||
for orientierungs_id, wert in dict_O.items():
|
||||
substitutionen[sp.Symbol(f"O{orientierungs_id}")] = float(wert)
|
||||
else:
|
||||
for standpunkt, zielpunkt, beobachtungenID, beobachtungsgruppeID, *_ in liste_beobachtungen_tachymeter:
|
||||
#for standpunkt, zielpunkt, beobachtungenID, beobachtungsgruppeID, *_ in liste_beobachtungen_tachymeter:
|
||||
# O_sym = sp.Symbol(f"O{beobachtungsgruppeID}")
|
||||
# if O_sym not in substitutionen:
|
||||
# substitutionen[O_sym] = 0
|
||||
for beobachtungsgruppeID, standpunkt, zielpunkt, azimut, richtung, zenitwinkel, schraegstrecke, orientierung in liste_azimut_richtungen:
|
||||
O_sym = sp.Symbol(f"O{beobachtungsgruppeID}")
|
||||
if O_sym not in substitutionen:
|
||||
substitutionen[O_sym] = 0
|
||||
substitutionen[O_sym] = orientierung
|
||||
|
||||
return substitutionen
|
||||
|
||||
|
||||
@@ -249,11 +249,13 @@ class Import:
|
||||
if pfad_datei in liste_dateinamen_in_db:
|
||||
Import_fortsetzen = False
|
||||
print(f"Der Import wurde abgebrochen, weil die Beobachtungen aus der Datei {pfad_datei} bereits in der Datenbank vorhanden sind.")
|
||||
return None, None
|
||||
|
||||
if instrumentenID not in liste_instrumentenid:
|
||||
Import_fortsetzen = False
|
||||
print(
|
||||
"Der Import wurde abgebrochen. Bitte eine gültige InstrumentenID eingeben. Bei Bedarf ist das Instrument neu anzulegen.")
|
||||
return None, None
|
||||
|
||||
if Import_fortsetzen:
|
||||
# Berechnete Punkthöhe Importieren
|
||||
@@ -305,6 +307,13 @@ class Import:
|
||||
return dict_punkt_mittelwert_punkthoehen, liste_punktnummern_in_db
|
||||
|
||||
def import_beobachtungen_nivellement_naeherung_punkthoehen(self, dict_punkt_mittelwert_punkthoehen, liste_punktnummern_in_db, liste_punktnummern_hinzufuegen):
|
||||
Import_fortsetzen = True
|
||||
|
||||
if dict_punkt_mittelwert_punkthoehen == None or liste_punktnummern_in_db == None or liste_punktnummern_hinzufuegen == None:
|
||||
Import_fortsetzen = False
|
||||
print("Der Import der Nivellementbeobachtungen wurde abgebrochen.")
|
||||
return None
|
||||
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
|
||||
|
||||
@@ -2,6 +2,10 @@ import sympy as sp
|
||||
from sympy.algebras.quaternion import Quaternion
|
||||
import Datenbank
|
||||
from itertools import combinations
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
from pyproj import CRS, Transformer, datadir
|
||||
import numpy as np
|
||||
|
||||
|
||||
class Transformationen:
|
||||
@@ -280,3 +284,121 @@ class Transformationen:
|
||||
])
|
||||
return dict_transformiert
|
||||
|
||||
def utm_to_XYZ(self, pfad_tif_quasigeoidundolation, liste_utm):
    """Transform UTM32/DHHN2016 coordinates to geocentric ECEF (ETRS89).

    Args:
        pfad_tif_quasigeoidundolation: path to the GCG2016 quasigeoid GeoTIFF;
            it is copied next to itself under the PROJ grid name
            ``de_bkg_gcg2016.tif`` and its directory is registered as a PROJ
            data directory so the height transformation can use the grid.
        liste_utm: iterable of ``(Punktnummer, E, N, Normalhoehe)`` tuples.

    Returns:
        dict: point number -> sympy 3×1 Matrix ``[X, Y, Z]`` in EPSG:4936.

    NOTE(review): the size comparison below only detects a missing or
    differently-sized grid copy — a same-size but different grid would not
    be refreshed; confirm this is acceptable.
    """
    pfad_gcg_tif = Path(pfad_tif_quasigeoidundolation)
    pfad_gcg_tif_proj = pfad_gcg_tif.with_name("de_bkg_gcg2016.tif")

    # Copy the grid under the name PROJ expects, unless an identical-size
    # copy is already present.
    if (not pfad_gcg_tif_proj.exists()) or (pfad_gcg_tif_proj.stat().st_size != pfad_gcg_tif.stat().st_size):
        shutil.copy2(pfad_gcg_tif, pfad_gcg_tif_proj)

    # Side effect: extends the process-wide PROJ data search path.
    datadir.append_data_dir(str(pfad_gcg_tif.parent))

    utm_epsg = 25832
    crs_src = CRS.from_user_input(f"EPSG:{utm_epsg}+EPSG:7837")  # ETRS89/DREF91 UTM + DHHN2016 height
    crs_dst = CRS.from_epsg(4936)  # ETRS89 geocentric (ECEF)

    # allow_ballpark=False: fail rather than silently use an inaccurate
    # fallback transformation.
    tr_best = Transformer.from_crs(
        crs_src,
        crs_dst,
        always_xy=True,
        allow_ballpark=False,
    )

    dict_geozentrisch_kartesisch = {}
    for Punktnummer, E, N, Normalhoehe in liste_utm:
        X, Y, Z = tr_best.transform(E, N, Normalhoehe)
        dict_geozentrisch_kartesisch[Punktnummer] = sp.Matrix([X, Y, Z])

    # Experimental geographic-3D / zeta check, kept for reference:
    #crs_geog3d = CRS.from_epsg(4937)  # ETRS89 (lon, lat, h)
    #tr_h = Transformer.from_crs(
    #    crs_src,
    #    crs_geog3d,
    #    always_xy=True,
    #    allow_ballpark=False,
    #)

    #lon, lat, h = tr_h.transform(E, N, H)
    #print("lon/lat/h:", lon, lat, h)
    #print("zeta (h-H):", h - H)

    return dict_geozentrisch_kartesisch
|
||||
|
||||
def ecef_to_utm(
    self,
    dict_koordinaten: dict,
    pfad_gcg_tif: str | Path | None = None,
    zone: int = 32,
):
    """Transform geocentric ECEF (EPSG:4936) coordinates to UTM + DHHN2016.

    Args:
        dict_koordinaten: point number -> coordinate container holding at
            least three scalars (X, Y, Z). Nested sympy Matrices, numpy
            arrays, lists and tuples are flattened; only the first three
            scalars found are used.
        pfad_gcg_tif: optional path to the GCG2016 quasigeoid GeoTIFF; if
            given, it is copied under the PROJ grid name
            ``de_bkg_gcg2016.tif`` and its directory registered with PROJ.
        zone: UTM zone (32 or 33), selecting EPSG:25832 or EPSG:25833.

    Returns:
        dict: point number -> ``(E, N, H)`` rounded to 8 decimals.

    Raises:
        FileNotFoundError: if ``pfad_gcg_tif`` is given but does not exist.
        ValueError: if fewer than three scalar values can be extracted
            from a point's coordinate container.
    """
    if pfad_gcg_tif is not None:
        pfad_gcg_tif = Path(pfad_gcg_tif).resolve()
        if not pfad_gcg_tif.exists():
            raise FileNotFoundError(f"Quasigeoid-Datei nicht gefunden: {pfad_gcg_tif}")

        # Copy under the name PROJ expects; the size check only detects a
        # missing or differently-sized copy.
        pfad_proj_grid = pfad_gcg_tif.with_name("de_bkg_gcg2016.tif")
        if (
            not pfad_proj_grid.exists()
            or pfad_proj_grid.stat().st_size != pfad_gcg_tif.stat().st_size
        ):
            shutil.copy2(pfad_gcg_tif, pfad_proj_grid)

        # Side effect: extends the process-wide PROJ data search path.
        datadir.append_data_dir(str(pfad_proj_grid.parent))

    crs_src = CRS.from_epsg(4936)  # ETRS89 geocentric (ECEF)

    # Target CRS: ETRS89 / UTM zone 32/33 + DHHN2016 normal height.
    # EPSG:25832/25833 = ETRS89 / UTM; EPSG:7837 = DHHN2016 height.
    utm_epsg = 25800 + zone  # 25832 or 25833
    crs_dst = CRS.from_user_input(f"EPSG:{utm_epsg}+EPSG:7837")

    # allow_ballpark=False: fail rather than silently use an inaccurate
    # fallback transformation.
    tr = Transformer.from_crs(
        crs_src,
        crs_dst,
        always_xy=True,
        allow_ballpark=False,
    )

    dict_koordinaten_utm = {}
    for punktnummer, koordinate in dict_koordinaten.items():
        # Depth-first flattening of arbitrarily nested containers until
        # three scalars (X, Y, Z) have been collected.
        werte = []
        queue = [koordinate]
        while queue and len(werte) < 3:
            v = queue.pop(0)

            # sympy Matrix: unwrap 1×1, otherwise prepend its elements.
            if isinstance(v, sp.Matrix):
                if v.rows * v.cols == 1:
                    queue.insert(0, v[0])
                else:
                    queue = list(np.array(v.tolist(), dtype=object).reshape(-1)) + queue
                continue

            # numpy array: unwrap single element, otherwise prepend flattened.
            if isinstance(v, np.ndarray):
                if v.size == 1:
                    queue.insert(0, v.reshape(-1)[0])
                else:
                    queue = list(v.reshape(-1)) + queue
                continue

            # list / tuple: unwrap single element, otherwise prepend contents.
            if isinstance(v, (list, tuple)):
                if len(v) == 1:
                    queue.insert(0, v[0])
                else:
                    queue = list(v) + queue
                continue

            # Scalar: collect as float.
            werte.append(float(v))

        if len(werte) < 3:
            raise ValueError(f"Zu wenig skalare Werte gefunden: {werte}")

        X, Y, Z = werte[0], werte[1], werte[2]

        E, N, H = tr.transform(X, Y, Z)
        # Round to avoid spurious trailing digits from limited
        # floating-point precision.
        dict_koordinaten_utm[punktnummer] = (round(E, 8), round(N, 8), round(H, 8))
    return dict_koordinaten_utm
|
||||
@@ -1,6 +1,7 @@
|
||||
import numpy as np
|
||||
import plotly.graph_objects as go
|
||||
from scipy.stats import f as f_dist
|
||||
import pandas as pd
|
||||
|
||||
|
||||
class Genauigkeitsmaße:
|
||||
@@ -14,55 +15,136 @@ class Genauigkeitsmaße:
|
||||
return float(s0apost)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def berechne_helmert_punktfehler_3D(Qxx_matrix: np.ndarray, s0apost: float, punkt_namen: list) -> dict:
|
||||
helmert_punktfehler_ergebnisse_3D = {}
|
||||
diag_Q = np.diag(Qxx_matrix)
|
||||
if len(diag_Q) < len(punkt_namen) * 3:
|
||||
raise ValueError("Die Matrix Qxx ist zu klein für die Anzahl der Punkte (3D erwartet).")
|
||||
for i, name in enumerate(punkt_namen):
|
||||
idx_x, idx_y, idx_z = 3 * i, 3 * i + 1, 3 * i + 2
|
||||
q_xx, q_yy, q_zz = diag_Q[idx_x], diag_Q[idx_y], diag_Q[idx_z]
|
||||
helmert_punktfehler_3D = s0apost * np.sqrt(q_xx + q_yy + q_zz)
|
||||
helmert_punktfehler_ergebnisse_3D[name] = round(float(helmert_punktfehler_3D), 4)
|
||||
return helmert_punktfehler_ergebnisse_3D
|
||||
|
||||
def helmert_punktfehler(Qxx, s0_apost, unbekannten_liste, dim=3):
    """Tabulate per-point coordinate standard deviations and Helmert point error.

    Assumes three unknowns (X, Y, Z) per point in ``unbekannten_liste``;
    the point id is the symbol name without its first character
    (e.g. "X10009" -> "10009"). With ``dim == 2`` the point error uses
    only the X/Y cofactors, otherwise all three.

    Returns:
        pandas.DataFrame with columns ["Punkt", "σx", "σy", "σz", f"σP_{dim}D"].
    """
    hauptdiagonale = np.diag(Qxx)
    zeilen = []

    for p in range(len(unbekannten_liste) // 3):
        basis = 3 * p
        punktname = str(unbekannten_liste[basis])[1:]  # "X10009" -> "10009"

        qx = hauptdiagonale[basis]
        qy = hauptdiagonale[basis + 1]
        qz = hauptdiagonale[basis + 2]

        # Component standard deviations.
        sigma_x = s0_apost * np.sqrt(qx)
        sigma_y = s0_apost * np.sqrt(qy)
        sigma_z = s0_apost * np.sqrt(qz)

        # Helmert point error: 2D uses only the horizontal cofactors.
        q_punkt = qx + qy if dim == 2 else qx + qy + qz
        sigma_punkt = s0_apost * np.sqrt(q_punkt)

        zeilen.append([
            punktname,
            float(sigma_x),
            float(sigma_y),
            float(sigma_z),
            float(sigma_punkt),
        ])

    return pd.DataFrame(zeilen, columns=["Punkt", "σx", "σy", "σz", f"σP_{dim}D"])
|
||||
|
||||
|
||||
@staticmethod
|
||||
def berechne_standardellipsen(Qxx: np.ndarray, s0: float, punkt_namen: list):
|
||||
standardellipsen = []
|
||||
for i, name in enumerate(punkt_namen):
|
||||
ix, iy = 3 * i, 3 * i + 1
|
||||
qxx, qyy, qxy = Qxx[ix, ix], Qxx[iy, iy], Qxx[ix, iy]
|
||||
k = np.sqrt((qxx - qyy) ** 2 + 4 * qxy ** 2)
|
||||
qa, qb = 0.5 * (qxx + qyy + k), 0.5 * (qxx + qyy - k)
|
||||
a, b = s0 * np.sqrt(qa), s0 * np.sqrt(qb)
|
||||
theta = 0.5 * np.arctan2(2 * qxy, qxx - qyy)
|
||||
standardellipsen.append({
|
||||
"name": name, "a": a, "b": b, "theta": theta, "prob": 0.39 # Standard ca. 39%
|
||||
})
|
||||
return standardellipsen
|
||||
|
||||
def standardellipse(Qxx, s0_apost, unbekannten_liste, dim_labels=3):
    """Tabulate per-point standard ellipse parameters.

    Assumes ``dim_labels`` unknowns per point; the point id is the first
    symbol's name without its leading character ("X10009" -> "10009").

    Returns:
        pandas.DataFrame with columns
        ["Punkt", "σx", "σy", "σxy", "s_max", "s_min", "θ [gon]"];
        the orientation is normalised to [0, 200) gon.
    """
    Qxx = np.asarray(Qxx, float)
    zeilen = []

    for p in range(len(unbekannten_liste) // dim_labels):
        zeile_x = dim_labels * p
        zeile_y = zeile_x + 1
        punktname = str(unbekannten_liste[zeile_x])[1:]  # "X10009" -> "10009"

        # 2x2 horizontal cofactor block.
        q_xx = Qxx[zeile_x, zeile_x]
        q_yy = Qxx[zeile_y, zeile_y]
        q_yx = Qxx[zeile_y, zeile_x]

        # Standard deviations and covariance of the components.
        sigma_x = s0_apost * np.sqrt(q_xx)
        sigma_y = s0_apost * np.sqrt(q_yy)
        sigma_xy = (s0_apost ** 2) * q_yx

        # Eigenvalues of the block (Q_dmax, Q_dmin) via the discriminant.
        wurzel = np.sqrt((q_xx - q_yy) ** 2 + 4 * (q_yx ** 2))
        q_max = 0.5 * (q_xx + q_yy + wurzel)
        q_min = 0.5 * (q_xx + q_yy - wurzel)

        # Semi-axes (standard deviations along the principal axes).
        halbachse_max = s0_apost * np.sqrt(q_max)
        halbachse_min = s0_apost * np.sqrt(q_min)

        # Orientation of the major axis in gon, normalised to [0, 200).
        theta_gon = 0.5 * np.arctan2(2 * q_yx, (q_xx - q_yy)) * (200 / np.pi)
        if theta_gon < 0:
            theta_gon += 200.0

        zeilen.append([
            punktname,
            float(sigma_x), float(sigma_y), float(sigma_xy),
            float(halbachse_max), float(halbachse_min),
            float(theta_gon),
        ])

    return pd.DataFrame(zeilen, columns=["Punkt", "σx", "σy", "σxy", "s_max", "s_min", "θ [gon]"])
|
||||
|
||||
|
||||
@staticmethod
def berechne_konfidenzellipsen(Qxx: np.ndarray, s0: float, r: int, punkt_namen: list,
                               wahrscheinlichkeit: float = 0.95):
    """Scale the 1-sigma standard ellipses to confidence ellipses.

    The scale factor is ``k = sqrt(2 * F)``, where F is the quantile of
    the F-distribution with df1=2 (plane) and df2=r (redundancy) at the
    requested probability.

    Returns:
        list[dict]: one dict per point with the scaled semi-axes, the
        unchanged orientation, the probability and the scale factor.
    """
    # F-distribution quantile (df1=2 for the plane, df2=r for redundancy).
    f_quantil = f_dist.ppf(wahrscheinlichkeit, 2, r)
    k_faktor = np.sqrt(2 * f_quantil)

    return [
        {
            "name": ell['name'],
            "a": ell['a'] * k_faktor,
            "b": ell['b'] * k_faktor,
            "theta": ell['theta'],
            "prob": wahrscheinlichkeit,
            "k_faktor": k_faktor,
        }
        for ell in Genauigkeitsmaße.berechne_standardellipsen(Qxx, s0, punkt_namen)
    ]
|
||||
def konfidenzellipse(Qxx, s0_apost, unbekannten_liste, R, alpha=0.05):
    """Tabulate per-point confidence ellipse parameters.

    Assumes three unknowns (X, Y, Z) per point; the point id is the first
    symbol's name without its leading character. The 1-sigma semi-axes are
    scaled by ``k = sqrt(2 * F_{1-alpha}(2, R))``.

    Returns:
        pandas.DataFrame with columns ["Punkt", "a_K", "b_K", "θ [gon]"];
        the orientation is normalised to [0, 200) gon.
    """
    Qxx = np.asarray(Qxx, float)

    # Confidence scale factor from the F-distribution (df1=2, df2=R).
    skalierung = float(np.sqrt(2.0 * f_dist.ppf(1.0 - alpha, 2, R)))

    zeilen = []
    for p in range(len(unbekannten_liste) // 3):  # X, Y, Z per point assumed
        zeile_x = 3 * p
        zeile_y = zeile_x + 1
        punktname = str(unbekannten_liste[zeile_x])[1:]  # "X10009" -> "10009"

        # 2x2 horizontal cofactor block.
        q_xx = Qxx[zeile_x, zeile_x]
        q_yy = Qxx[zeile_y, zeile_y]
        q_xy = Qxx[zeile_y, zeile_x]  # = Qyx

        # Eigenvalues of the block via the discriminant.
        wurzel = np.sqrt((q_xx - q_yy) ** 2 + 4 * (q_xy ** 2))
        q_max = 0.5 * (q_xx + q_yy + wurzel)
        q_min = 0.5 * (q_xx + q_yy - wurzel)

        # 1-sigma semi-axes.
        halbachse_max = s0_apost * np.sqrt(q_max)
        halbachse_min = s0_apost * np.sqrt(q_min)

        # Orientation of the major axis in gon, normalised to [0, 200).
        theta_gon = 0.5 * np.arctan2(2 * q_xy, (q_xx - q_yy)) * (200 / np.pi)
        if theta_gon < 0:
            theta_gon += 200.0

        zeilen.append([
            punktname,
            float(skalierung * halbachse_max),
            float(skalierung * halbachse_min),
            float(theta_gon),
        ])

    return pd.DataFrame(zeilen, columns=["Punkt", "a_K", "b_K", "θ [gon]"])
|
||||
|
||||
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -1,53 +1,29 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Sequence, List, Dict
|
||||
import sympy as sp
|
||||
|
||||
import numpy as np
|
||||
from scipy import stats
|
||||
from scipy.stats import norm
|
||||
import pandas as pd
|
||||
|
||||
@dataclass
|
||||
class Zuverlaessigkeit:
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
def berechne_redundanzanteile(res_dict: dict, beobachtungen_labels: list) -> pd.DataFrame:
    """Tabulate the redundancy numbers r_i of all observations.

    Expects the redundancy matrix R = Qvv * P under key "R" in ``res_dict``
    and reads its diagonal (the r_i values).

    Returns:
        pandas.DataFrame with columns "Beobachtung", "r_i" (0..1, rounded
        to 4 decimals) and "r_i_prozent" (0..100 %, rounded to 2 decimals).

    Raises:
        ValueError: if "R" is missing from ``res_dict``.
    """
    R = res_dict.get("R")
    if R is None:
        raise ValueError("Die Redundanzmatrix R wurde im res_dict nicht gefunden.")

    # The diagonal elements of R are the redundancy shares r_i.
    r_werte = np.diag(R)

    return pd.DataFrame({
        "Beobachtung": beobachtungen_labels,
        "r_i": np.round(r_werte, 4),           # fraction (0 to 1)
        "r_i_prozent": np.round(r_werte * 100, 2),  # percent (0 to 100 %)
    })
|
||||
def gesamtredundanz(n, u):
    """Return the total redundancy of the adjustment: observations minus unknowns."""
    return n - u
|
||||
|
||||
|
||||
|
||||
def redundanzanalyse(self, r_vec: Sequence[float]) -> Dict[str, object]:
    """Summarise a vector of redundancy numbers r_i.

    Args:
        r_vec: sequence of redundancy shares (each expected in [0, 1]).

    Returns:
        dict with the r_i as floats ("r_i"), their percentages ("EVi"),
        their controllability classes ("klassen", via
        ``self.klassifiziere_ri``), and the sum, minimum and maximum.
    """
    # Normalise entries via sympy so mixed numeric representations work.
    r_s = [sp.sympify(r) for r in r_vec]
    # Controllability in percent.
    EVi = [float(r * 100) for r in r_s]
    klassen = [self.klassifiziere_ri(float(r)) for r in r_s]

    return {
        "r_i": [float(r) for r in r_s],
        "EVi": EVi,
        "klassen": klassen,
        "r_sum": float(sum(r_s)),
        "min_r": float(min(r_s)),
        "max_r": float(max(r_s)),
    }
|
||||
def berechne_R(Q_vv, P):
    """Return the redundancy matrix R = Q_vv · P."""
    return Q_vv @ P
|
||||
|
||||
|
||||
def berechne_ri(R):
    """Return the redundancy shares r_i (diagonal of R) and the same values in percent."""
    ri = np.diag(R)
    return ri, 100.0 * ri
|
||||
|
||||
def klassifiziere_ri(self, ri: float) -> str:
|
||||
|
||||
def klassifiziere_ri(ri): #Klassifizierung der Redundanzanteile
|
||||
if ri < 0.01:
|
||||
return "nicht kontrollierbar"
|
||||
elif ri < 0.10:
|
||||
@@ -60,75 +36,131 @@ class Zuverlaessigkeit:
|
||||
return "nahezu vollständig redundant"
|
||||
|
||||
|
||||
def globaltest(r_gesamt, sigma0_apost, sigma0_apriori, alpha):
|
||||
T_G = (sigma0_apost ** 2) / (sigma0_apriori ** 2)
|
||||
F_krit = stats.f.ppf(1 - alpha, r_gesamt, 10 ** 9)
|
||||
H0 = T_G <= F_krit
|
||||
|
||||
def globaltest(self, sigma0_hat: float, sigma0_apriori: float, F_krit: float):
|
||||
s_hat = sp.sympify(sigma0_hat)
|
||||
s0 = sp.sympify(sigma0_apriori)
|
||||
Fk = sp.sympify(F_krit)
|
||||
|
||||
T_G = (s_hat**2) / (s0**2)
|
||||
H0 = bool(T_G <= Fk)
|
||||
if H0:
|
||||
interpretation = (
|
||||
"Nullhypothese H₀ angenommen.\n"
|
||||
)
|
||||
else:
|
||||
interpretation = (
|
||||
"Nullhypothese H₀ verworfen!\n"
|
||||
"Dies kann folgende Gründe haben:\n"
|
||||
"→ Es befinden sich grobe Fehler im Datenmaterial.\n"
|
||||
"→ Das funktionale Modell ist fehlerhaft.\n"
|
||||
"→ Das stochastische Modell ist zu optimistisch."
|
||||
)
|
||||
|
||||
return {
|
||||
"T_G": float(T_G),
|
||||
"F_krit": float(Fk),
|
||||
"r_gesamt": r_gesamt,
|
||||
"sigma0_apost": sigma0_apost,
|
||||
"sigma0_apriori": sigma0_apriori,
|
||||
"alpha": alpha,
|
||||
"T_G": T_G,
|
||||
"F_krit": F_krit,
|
||||
"H0_angenommen": H0,
|
||||
"Interpretation": interpretation,
|
||||
}
|
||||
|
||||
|
||||
def lokaltest_innere_Zuverlaessigkeit(v, Q_vv, ri, labels, s0_apost, alpha, beta):
|
||||
v = np.asarray(v, float).reshape(-1)
|
||||
Q_vv = np.asarray(Q_vv, float)
|
||||
ri = np.asarray(ri, float).reshape(-1)
|
||||
labels = list(labels)
|
||||
|
||||
def data_snooping(
|
||||
self,
|
||||
v: Sequence[float],
|
||||
Qv_diag: Sequence[float],
|
||||
r_vec: Sequence[float],
|
||||
sigma0_hat: float,
|
||||
k: float,
|
||||
) -> List[Dict[str, float | bool]]:
|
||||
# Standardabweichungen der Residuen
|
||||
qv = np.diag(Q_vv).astype(float)
|
||||
s_vi = float(s0_apost) * np.sqrt(qv)
|
||||
|
||||
v_s = [sp.sympify(x) for x in v]
|
||||
Qv_s = [sp.sympify(q) for q in Qv_diag]
|
||||
r_s = [sp.sympify(r) for r in r_vec]
|
||||
s0 = sp.sympify(sigma0_hat)
|
||||
k_s = sp.sympify(k)
|
||||
# Quantile k und kA (zweiseitig),
|
||||
k = float(norm.ppf(1 - alpha / 2))
|
||||
kA = float(norm.ppf(1 - beta)) # (Testmacht 1-β)
|
||||
|
||||
results = []
|
||||
# Nichtzentralitätsparameter δ0
|
||||
nzp = k + kA
|
||||
|
||||
for vi, Qvi, ri in zip(v_s, Qv_s, r_s):
|
||||
# Normierte Verbesserung NV
|
||||
NV = np.abs(v) / s_vi
|
||||
|
||||
s_vi = s0 * sp.sqrt(Qvi)
|
||||
NV_i = sp.Abs(vi) / s_vi
|
||||
# Grenzen für v_i
|
||||
v_grenz = k * s_vi
|
||||
v_min = -v_grenz
|
||||
v_max = v_grenz
|
||||
|
||||
if ri == 0:
|
||||
GRZW_i = sp.oo
|
||||
else:
|
||||
GRZW_i = (s_vi / ri) * k_s
|
||||
# Grobfehlerabschätzung:
|
||||
ri_safe = np.where(ri == 0, np.nan, ri)
|
||||
GF = -v / ri_safe
|
||||
|
||||
auff = bool(NV_i > k_s)
|
||||
# Grenzwert für die Aufdeckbarkeit eines GF (GRZW)
|
||||
GRZW_i = (s_vi / ri_safe) * k
|
||||
|
||||
results.append({
|
||||
"v_i": float(vi),
|
||||
"Qv_i": float(Qvi),
|
||||
"r_i": float(ri),
|
||||
"s_vi": float(s_vi),
|
||||
"NV_i": float(NV_i),
|
||||
"GRZW_i": float(GRZW_i if GRZW_i != sp.oo else float("inf")),
|
||||
"auffällig": auff,
|
||||
})
|
||||
auffaellig = NV > k
|
||||
|
||||
return results
|
||||
Lokaltest_innere_Zuv = pd.DataFrame({
|
||||
"Beobachtung": labels,
|
||||
"v_i": v,
|
||||
"r_i": ri,
|
||||
"s_vi": s_vi,
|
||||
"k": k,
|
||||
"NV_i": NV,
|
||||
"auffaellig": auffaellig,
|
||||
"v_min": v_min,
|
||||
"v_max": v_max,
|
||||
"GF_i": GF,
|
||||
"GRZW_v": v_grenz, # = k*s_vi
|
||||
"GRZW_i": GRZW_i, # = (s_vi/r_i)*k
|
||||
"alpha": alpha,
|
||||
"beta": beta,
|
||||
"kA": kA,
|
||||
"δ0": nzp,
|
||||
})
|
||||
return Lokaltest_innere_Zuv
|
||||
|
||||
|
||||
def EinflussPunktlage(df_lokaltest):
|
||||
df = df_lokaltest.copy()
|
||||
|
||||
def aeussere_zuverlaessigkeit_EF(self, r_vec: Sequence[float], delta0: float):
|
||||
delta = sp.sympify(delta0)
|
||||
EF_list = []
|
||||
for ri in r_vec:
|
||||
ri_s = sp.sympify(ri)
|
||||
if ri_s == 0:
|
||||
EF = sp.oo
|
||||
else:
|
||||
EF = sp.sqrt((1 - ri_s) / ri_s) * delta
|
||||
EF_list.append(float(EF if EF != sp.oo else float("inf")))
|
||||
r = df["r_i"].astype(float).to_numpy()
|
||||
GF = df["GF_i"].astype(float).to_numpy()
|
||||
nzp = df["δ0"].astype(float).to_numpy()
|
||||
|
||||
return EF_list
|
||||
EF = np.sqrt((1 - r) / r) * nzp
|
||||
EP = (1 - r) * GF
|
||||
|
||||
df["δ0"] = nzp
|
||||
df["EF_i"] = EF
|
||||
df["EP_i"] = EP
|
||||
|
||||
EinflussPunktlage = df[["Beobachtung", "r_i", "GF_i", "EF_i", "EP_i", "δ0", "alpha", "beta"]]
|
||||
return EinflussPunktlage
|
||||
|
||||
|
||||
def aeussere_zuverlaessigkeit_EF(Qxx, A, P, s0_apost, GRZW, labels):
|
||||
Qxx = np.asarray(Qxx, float)
|
||||
A = np.asarray(A, float)
|
||||
P = np.asarray(P, float)
|
||||
GRZW = np.asarray(GRZW, float).reshape(-1)
|
||||
labels = list(labels)
|
||||
|
||||
B = Qxx @ (A.T @ P)
|
||||
|
||||
EF = np.empty_like(GRZW, dtype=float)
|
||||
|
||||
# Für jede Beobachtung i: ∇x_i = B[:,i] * GRZW_i
|
||||
# EF_i^2 = (GRZW_i^2 * B_i^T Qxx^{-1} B_i) / s0^2
|
||||
for i in range(len(GRZW)):
|
||||
bi = B[:, i] # (u,)
|
||||
y = np.linalg.solve(Qxx, bi) # = Qxx^{-1} bi
|
||||
EF2 = (GRZW[i] ** 2) * float(bi @ y) / (float(s0_apost) ** 2)
|
||||
EF[i] = np.sqrt(EF2)
|
||||
|
||||
df = pd.DataFrame({
|
||||
"Beobachtung": labels,
|
||||
"GRZW_i": GRZW,
|
||||
"EF_i": EF
|
||||
})
|
||||
return df
|
||||
@@ -6,60 +6,34 @@ import numpy as np
|
||||
import Export
|
||||
|
||||
|
||||
def ausgleichung_global(
|
||||
A: sp.Matrix,
|
||||
dl: sp.Matrix,
|
||||
Q_ll: sp.Matrix,
|
||||
x0: sp.Matrix,
|
||||
idx_X, idx_Y, idx_Z,
|
||||
anschluss_indices,
|
||||
anschluss_werte,
|
||||
Sigma_AA,
|
||||
):
|
||||
# 1) Datumsfestlegung (weiches Datum) System erweitern
|
||||
A_ext, dl_ext, Q_ext = Datumsfestlegung.weiches_datum(
|
||||
A=A,
|
||||
dl=dl,
|
||||
Q_ll=Q_ll,
|
||||
x0=x0,
|
||||
anschluss_indices=anschluss_indices,
|
||||
anschluss_werte=anschluss_werte,
|
||||
Sigma_AA=Sigma_AA,
|
||||
)
|
||||
def ausgleichung_global(A, dl, Q_ext):
|
||||
A=np.asarray(A, float)
|
||||
dl = np.asarray(dl, float).reshape(-1, 1)
|
||||
Q_ext = np.asarray(Q_ext, float)
|
||||
|
||||
# 2) Gewichtsmatrix P
|
||||
# 1) Gewichtsmatrix P
|
||||
P = StochastischesModell.berechne_P(Q_ext)
|
||||
if isinstance(P, np.ndarray):
|
||||
P = sp.Matrix(P)
|
||||
|
||||
# 3) Normalgleichungsmatrix N und Absolutgliedvektor n
|
||||
N = A_ext.T * P * A_ext
|
||||
n = A_ext.T * P * dl_ext
|
||||
# 2) Normalgleichungsmatrix N und Absolutgliedvektor n
|
||||
N = A.T @ P @ A
|
||||
n = A.T @ P @ dl
|
||||
|
||||
# 4) Zuschlagsvektor dx
|
||||
dx = N.LUsolve(n)
|
||||
# 3) Zuschlagsvektor dx und Unbekanntenvektor x
|
||||
dx = np.linalg.inv(N) @ n
|
||||
|
||||
# 5) Residuenvektor v
|
||||
v = dl - A * dx
|
||||
# 4) Residuenvektor v
|
||||
v = A @ dx - dl
|
||||
|
||||
# 6) Kofaktormatrix der Unbekannten Q_xx
|
||||
# 5) Kofaktormatrix der Unbekannten Q_xx
|
||||
Q_xx = StochastischesModell.berechne_Q_xx(N)
|
||||
|
||||
# 7) Kofaktormatrix der Beobachtungen Q_ll_dach
|
||||
Q_ll_dach = A * Q_xx * A.T
|
||||
# 6) Kofaktormatrix der Beobachtungen Q_ll_dach
|
||||
Q_ll_dach = StochastischesModell.berechne_Q_ll_dach(A, Q_xx)
|
||||
|
||||
# 8) Kofaktormatrix der Verbesserungen Q_vv
|
||||
Q_vv = StochastischesModell.berechne_Qvv(A, P, Q_xx)
|
||||
# 7) Kofaktormatrix der Verbesserungen Q_vv
|
||||
Q_vv = StochastischesModell.berechne_Qvv(Q_ext, Q_ll_dach)
|
||||
|
||||
# 9) Redundanzmatrix R und Redundanzanteile r
|
||||
R = StochastischesModell.berechne_R(Q_vv, P) #Redundanzmatrix R
|
||||
r = StochastischesModell.berechne_r(R) #Redundanzanteile als Vektor r
|
||||
redundanzanteile = A.shape[0] - A.shape[1] #n-u+d
|
||||
|
||||
# 10) s0 a posteriori
|
||||
soaposteriori = Genauigkeitsmaße.s0apost(v, P, redundanzanteile)
|
||||
|
||||
# 11) Ausgabe
|
||||
# 8) Ausgabe
|
||||
dict_ausgleichung = {
|
||||
"dx": dx,
|
||||
"v": v,
|
||||
@@ -68,94 +42,11 @@ def ausgleichung_global(
|
||||
"Q_xx": Q_xx,
|
||||
"Q_ll_dach": Q_ll_dach,
|
||||
"Q_vv": Q_vv,
|
||||
"R": R,
|
||||
"r": r,
|
||||
"soaposteriori": soaposteriori,
|
||||
}
|
||||
|
||||
Export.Export.ausgleichung_to_datei(r"Zwischenergebnisse\Ausgleichung_Iteration0.csv", dict_ausgleichung)
|
||||
return dict_ausgleichung, dx
|
||||
|
||||
|
||||
|
||||
|
||||
def ausgleichung_lokal(
|
||||
A: sp.Matrix,
|
||||
dl: sp.Matrix,
|
||||
Q_ll: sp.Matrix,
|
||||
x0: sp.Matrix,
|
||||
liste_punktnummern,
|
||||
auswahl,
|
||||
mit_massstab: bool = True,
|
||||
):
|
||||
# 1) Gewichtsmatrix P
|
||||
P = StochastischesModell.berechne_P(Q_ll)
|
||||
|
||||
# 2) Normalgleichungsmatrix N und Absolutgliedvektor n
|
||||
N = A.T * P * A
|
||||
n = A.T * P * dl
|
||||
|
||||
# 3) Datumsfestlegung (Teilspurminimierung)
|
||||
G = Datumsfestlegung.raenderungsmatrix_G(x0, liste_punktnummern, mit_massstab=mit_massstab)
|
||||
aktive = Datumsfestlegung.datumskomponenten(auswahl, liste_punktnummern)
|
||||
E = Datumsfestlegung.auswahlmatrix_E(u=A.cols, aktive_unbekannte_indices=aktive)
|
||||
Gi = E * G
|
||||
|
||||
# 4) Zuschlagsvektor dx
|
||||
dx = Datumsfestlegung.berechne_dx_geraendert(N, n, Gi)
|
||||
|
||||
# 5) Residuenvektor v
|
||||
v = dl - A * dx
|
||||
|
||||
# 6) Kofaktormatrix der Unbekannten Q_xx
|
||||
N_inv = N.inv()
|
||||
N_inv_G = N_inv * Gi
|
||||
S = Gi.T * N_inv_G
|
||||
S_inv = S.inv()
|
||||
Q_xx = N_inv - N_inv_G * S_inv * N_inv_G.T
|
||||
|
||||
# 7) Kofaktormatrix der Beobachtungen Q_ll_dach
|
||||
Q_lhat_lhat = A * Q_xx * A.T
|
||||
|
||||
# 8) Kofaktormatrix der Verbesserungen Q_vv
|
||||
Q_vv = P.inv() - Q_lhat_lhat
|
||||
|
||||
# 9) Redundanzmatrix R, Redundanzanteile r, Redundanz
|
||||
R = Q_vv * P
|
||||
r_vec = sp.Matrix(R.diagonal())
|
||||
n_beob = A.rows
|
||||
u = A.cols
|
||||
d = Gi.shape[1]
|
||||
r_gesamt = n_beob - u + d
|
||||
|
||||
# 10) s0 a posteriori
|
||||
sigma0_apost = Genauigkeitsmaße.s0apost(v, P, r_gesamt)
|
||||
|
||||
# 11) Ausgabe
|
||||
dict_ausgleichung_lokal = {
|
||||
"dx": dx,
|
||||
"v": v,
|
||||
"Q_ll": Q_ll,
|
||||
"P": P,
|
||||
"N": N,
|
||||
"Q_xx": Q_xx,
|
||||
"Q_lhat_lhat": Q_lhat_lhat,
|
||||
"Q_vv": Q_vv,
|
||||
"R": R,
|
||||
"r": r_vec,
|
||||
"r_gesamt": r_gesamt,
|
||||
"sigma0_apost": sigma0_apost,
|
||||
"G": G,
|
||||
"Gi": Gi,
|
||||
}
|
||||
|
||||
Export.Export.ausgleichung_to_datei(r"Zwischenergebnisse\Ausgleichung_Iteration0_lokal.csv", dict_ausgleichung_lokal)
|
||||
return dict_ausgleichung_lokal, dx
|
||||
|
||||
|
||||
|
||||
|
||||
def ausgleichung_lokal_numpy(
|
||||
A,
|
||||
dl,
|
||||
Q_ll,
|
||||
|
||||
@@ -124,54 +124,69 @@ class StochastischesModell:
|
||||
|
||||
if beobachtungsart_i == "gnssbx":
|
||||
cxx = sp.symbols(f"cxx_{beobachtungenID_i}")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}**2")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}")
|
||||
liste_standardabweichungen_symbole.append(cxx)
|
||||
Qll[i, i] = cxx * s0
|
||||
Qll[i, i] = cxx * (s0 ** 2)
|
||||
|
||||
cxy = sp.Symbol(f"cxy_{beobachtungenID_i}")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}**2")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}")
|
||||
for j in range(i + 1, len(liste_beobachtungen_symbolisch)):
|
||||
beobachtung_symbolisch_j = liste_beobachtungen_symbolisch[j]
|
||||
aufgeteilt_j = beobachtung_symbolisch_j.split("_")
|
||||
|
||||
if int(aufgeteilt_j[0]) == beobachtungenID_i and aufgeteilt_j[1] == "gnssby":
|
||||
Qll[i, j] = cxy * s0
|
||||
Qll[j, i] = cxy * s0
|
||||
Qll[i, j] = cxy * (s0 ** 2)
|
||||
Qll[j, i] = cxy * (s0 ** 2)
|
||||
break
|
||||
|
||||
cxz = sp.Symbol(f"cxz_{beobachtungenID_i}")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}**2")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}")
|
||||
for j in range(i + 1, len(liste_beobachtungen_symbolisch)):
|
||||
beobachtung_symbolisch_j = liste_beobachtungen_symbolisch[j]
|
||||
aufgeteilt_j = beobachtung_symbolisch_j.split("_")
|
||||
|
||||
if int(aufgeteilt_j[0]) == beobachtungenID_i and aufgeteilt_j[1] == "gnssbz":
|
||||
Qll[i, j] = cxz * s0
|
||||
Qll[j, i] = cxz * s0
|
||||
Qll[i, j] = cxz * (s0 ** 2)
|
||||
Qll[j, i] = cxz * (s0 ** 2)
|
||||
break
|
||||
|
||||
if beobachtungsart_i == "gnssby":
|
||||
cyy = sp.symbols(f"cyy_{beobachtungenID_i}")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}**2")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}")
|
||||
liste_standardabweichungen_symbole.append(cyy)
|
||||
Qll[i, i] = cyy * s0
|
||||
Qll[i, i] = cyy * (s0 ** 2)
|
||||
|
||||
cyz = sp.Symbol(f"cyz_{beobachtungenID_i}")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}**2")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}")
|
||||
for j in range(i + 1, len(liste_beobachtungen_symbolisch)):
|
||||
beobachtung_symbolisch_j = liste_beobachtungen_symbolisch[j]
|
||||
aufgeteilt_j = beobachtung_symbolisch_j.split("_")
|
||||
|
||||
if int(aufgeteilt_j[0]) == beobachtungenID_i and aufgeteilt_j[1] == "gnssbz":
|
||||
Qll[i, j] = cyz * s0
|
||||
Qll[j, i] = cyz * s0
|
||||
Qll[i, j] = cyz * (s0 ** 2)
|
||||
Qll[j, i] = cyz * (s0 ** 2)
|
||||
break
|
||||
|
||||
if beobachtungsart_i == "gnssbz":
|
||||
czz = sp.symbols(f"czz_{beobachtungenID_i}")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}**2")
|
||||
s0 = sp.symbols(f"s0_{beobachtungenID_i}")
|
||||
liste_standardabweichungen_symbole.append(czz)
|
||||
Qll[i, i] = czz * s0
|
||||
Qll[i, i] = czz * (s0 ** 2)
|
||||
|
||||
if aufgeteilt_i[1] == "niv":
|
||||
beobachtungenID_i = int(aufgeteilt_i[0])
|
||||
instrumenteID_i = dict_beobachtungenID_instrumenteID[beobachtungenID_i]
|
||||
beobachtungsart_i = str(aufgeteilt_i[1])
|
||||
|
||||
stabw_apriori_konstant = sp.Symbol(f"stabw_apriori_konstant_{beobachtungsart_i}_{instrumenteID_i}")
|
||||
stabw_apriori_streckenprop = sp.Symbol(f"stabw_apriori_streckenprop_{beobachtungsart_i}_{instrumenteID_i}")
|
||||
nivellement_distanz = sp.Symbol(f"niv_distanz_{beobachtungenID_i}")
|
||||
nivellement_anz_wechselpunkte = sp.Symbol(f"niv_anz_wechselpunkte_{beobachtungenID_i}")
|
||||
|
||||
sigma = sp.sqrt(nivellement_anz_wechselpunkte * stabw_apriori_konstant ** 2 + stabw_apriori_streckenprop ** 2 * nivellement_distanz / 1000)
|
||||
liste_standardabweichungen_symbole.append(sigma)
|
||||
|
||||
Qll[i, i] = sigma ** 2
|
||||
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Qll_Symbolisch.csv", liste_beobachtungen_symbolisch, liste_beobachtungen_symbolisch, Qll, "Qll")
|
||||
return Qll
|
||||
@@ -186,6 +201,7 @@ class StochastischesModell:
|
||||
|
||||
liste_beobachtungen_tachymeter = db_zugriff.get_beobachtungen_from_beobachtungenid()
|
||||
liste_beobachtungen_gnss = db_zugriff.get_beobachtungen_gnssbasislinien()
|
||||
liste_beobachtungen_nivellement = db_zugriff.get_beobachtungen_nivellement()
|
||||
|
||||
dict_beobachtungenID_distanz = {}
|
||||
for standpunkt, zielpunkt, beobachtungenID, beobachtungsgruppeID, tachymeter_richtung, tachymeter_zenitwinkel, tachymeter_distanz in liste_beobachtungen_tachymeter:
|
||||
@@ -218,6 +234,8 @@ class StochastischesModell:
|
||||
beobachtungsart_kurz = "R"
|
||||
elif beobachtungsart == "Tachymeter_Zenitwinkel":
|
||||
beobachtungsart_kurz = "ZW"
|
||||
elif beobachtungsart == "Geometrisches_Nivellement":
|
||||
beobachtungsart_kurz = "niv"
|
||||
|
||||
|
||||
if stabw_apriori_konstant is not None:
|
||||
@@ -257,9 +275,14 @@ class StochastischesModell:
|
||||
substitutionen[sp.Symbol(f"czz_{beobachtungenID}")] = float(gnss_czz)
|
||||
substitutionen[sp.Symbol(f"s0_{beobachtungenID}")] = float(gnss_s0)
|
||||
|
||||
# Geometrisches Nivellement
|
||||
for nivellement in liste_beobachtungen_nivellement:
|
||||
beobachtungenID = nivellement[0]
|
||||
niv_strecke = nivellement[4]
|
||||
niv_anz_standpkte = nivellement[5]
|
||||
|
||||
|
||||
|
||||
substitutionen[sp.Symbol(f"niv_anz_wechselpunkte_{beobachtungenID}")] = float(niv_anz_standpkte)
|
||||
substitutionen[sp.Symbol(f"niv_distanz_{beobachtungenID}")] = float(niv_strecke)
|
||||
|
||||
#Qll_numerisch = Qll_Matrix_Symbolisch.xreplace(substitutionen)
|
||||
if (self.func_Qll_numerisch is None) or (set(self.liste_symbole_lambdify) != set(substitutionen.keys())):
|
||||
@@ -271,6 +294,11 @@ class StochastischesModell:
|
||||
cse=True
|
||||
)
|
||||
|
||||
fehlend = sorted(list(Qll_Matrix_Symbolisch.free_symbols - set(substitutionen.keys())), key=lambda s: str(s))
|
||||
if fehlend:
|
||||
raise ValueError(
|
||||
f"Qll_numerisch: Fehlende Substitutionen ({len(fehlend)}): {[str(s) for s in fehlend[:80]]}")
|
||||
|
||||
liste_werte = [substitutionen[s] for s in self.liste_symbole_lambdify]
|
||||
Qll_numerisch = np.asarray(self.func_Qll_numerisch(*liste_werte), dtype=float)
|
||||
|
||||
@@ -345,20 +373,20 @@ class StochastischesModell:
|
||||
|
||||
@staticmethod
|
||||
def berechne_P(Q_ll):
|
||||
return np.linalg.inv(Q_ll)
|
||||
P = np.linalg.inv(Q_ll)
|
||||
return P
|
||||
|
||||
@staticmethod
|
||||
def berechne_Q_xx(N):
|
||||
if N.shape[0] != N.shape[1]:
|
||||
raise ValueError("N muss eine quadratische Matrix sein")
|
||||
return np.linalg.inv(N)
|
||||
Qxx = np.linalg.inv(N)
|
||||
return Qxx
|
||||
|
||||
def berechne_Qvv(self, A, P, Q_xx):
|
||||
Q_vv = np.linalg.inv(P) - A @ Q_xx @ A.T
|
||||
def berechne_Q_ll_dach(A, Q_xx):
|
||||
Q_ll_dach = A @ Q_xx @ A.T
|
||||
return Q_ll_dach
|
||||
|
||||
def berechne_Qvv(Q_ll, Q_ll_dach):
|
||||
Q_vv = Q_ll - Q_ll_dach
|
||||
return Q_vv
|
||||
|
||||
def berechne_R(self, Q_vv, P):
|
||||
return Q_vv @ P #Redundanzmatrix
|
||||
|
||||
def berechne_r(self, R):
|
||||
return np.diag(R).reshape(-1, 1) #Redundanzanteile
|
||||
@@ -2834,3 +2834,179 @@ Beobachtungsvektor;
|
||||
69_gnssbx_GNA2_10044;27222,0343000000
|
||||
69_gnssby_GNA2_10044;-51031,7762000000
|
||||
69_gnssbz_GNA2_10044;-14409,2568000000
|
||||
946_niv_812_10047;-0,291855000000000
|
||||
947_niv_10047_10046;-0,0622100000000000
|
||||
948_niv_10046_10045;0,185375000000000
|
||||
949_niv_10045_10034;0,0161750000000000
|
||||
950_niv_10034_10035;0,340540000000000
|
||||
951_niv_10035_10029;-0,530220000000000
|
||||
952_niv_10029_10030;0,0293100000000000
|
||||
953_niv_10030_10031;0,288435000000000
|
||||
954_niv_10031_10017;0,108750000000000
|
||||
955_niv_10017_10013;-0,171175000000000
|
||||
956_niv_10013_10012;0,0951900000000000
|
||||
957_niv_10012_10014;0,00363000000000000
|
||||
958_niv_10014_10015;0,0874400000000000
|
||||
959_niv_10015_10016;0,0732750000000000
|
||||
960_niv_10016_10007;-0,138150000000000
|
||||
961_niv_10007_10007;-5,00000000000000e-6
|
||||
962_niv_10007_10016;0,138140000000000
|
||||
963_niv_10016_10015;-0,0733050000000000
|
||||
964_niv_10015_10014;-0,0874000000000000
|
||||
965_niv_10014_10012;-0,00352000000000000
|
||||
966_niv_10012_10013;-0,0951850000000000
|
||||
967_niv_10013_10017;0,171240000000000
|
||||
968_niv_10017_10031;-0,108670000000000
|
||||
969_niv_10031_10030;-0,288305000000000
|
||||
970_niv_10030_10029;-0,0292550000000000
|
||||
971_niv_10029_10035;0,530365000000000
|
||||
972_niv_10035_10034;-0,340460000000000
|
||||
973_niv_10034_10045;-0,0162900000000000
|
||||
974_niv_10045_10046;-0,185265000000000
|
||||
975_niv_10046_10047;0,0619550000000000
|
||||
976_niv_10047_812;0,292045000000000
|
||||
977_niv_666_10054;-0,0780400000000000
|
||||
978_niv_10054_10056;-0,276060000000000
|
||||
979_niv_10056_10058;0,443385000000000
|
||||
980_niv_10058_10052;0,418625000000000
|
||||
981_niv_10052_10043;0,0300500000000000
|
||||
982_niv_10043_10026;-0,0113050000000000
|
||||
983_niv_10026_10010;-0,374800000000000
|
||||
984_niv_10010_10006;-0,341690000000000
|
||||
985_niv_10006_10010;0,341700000000000
|
||||
986_niv_10010_10026;0,374990000000000
|
||||
987_niv_10026_10043;0,0112600000000000
|
||||
988_niv_10043_10052;-0,0298900000000000
|
||||
989_niv_10052_10058;-0,418480000000000
|
||||
990_niv_10058_10056;-0,443375000000000
|
||||
991_niv_10056_10054;0,276070000000000
|
||||
992_niv_10054_666;0,0781150000000000
|
||||
993_niv_816_10048;-0,0977900000000000
|
||||
994_niv_10048_812;0,100375000000000
|
||||
995_niv_812_10049;0,107250000000000
|
||||
996_niv_10049_10053;0,281115000000000
|
||||
997_niv_10053_10050;-0,111965000000000
|
||||
998_niv_10050_10051;0,121555000000000
|
||||
999_niv_10051_10040;-0,118450000000000
|
||||
1000_niv_10040_10037;-0,172585000000000
|
||||
1001_niv_10037_10038;0,0945300000000000
|
||||
1002_niv_10038_10039;0,00798500000000000
|
||||
1003_niv_10039_10032;-0,367745000000000
|
||||
1004_niv_10032_10031;0,134105000000000
|
||||
1005_niv_10031_10033;-0,00539500000000000
|
||||
1006_niv_10033_10025;0,560140000000000
|
||||
1007_niv_10025_10024;-0,111940000000000
|
||||
1008_niv_10024_10023;0,0392000000000000
|
||||
1009_niv_10023_10022;-0,123095000000000
|
||||
1010_niv_10022_10021;0,132880000000000
|
||||
1011_niv_10021_10026;-0,0489250000000000
|
||||
1012_niv_10026_10020;0,0351850000000000
|
||||
1013_niv_10020_10019;-0,361530000000000
|
||||
1014_niv_10019_10017;-0,00806000000000000
|
||||
1015_niv_10017_10019;0,00800000000000000
|
||||
1016_niv_10019_10020;0,361585000000000
|
||||
1017_niv_10020_10026;-0,0351300000000000
|
||||
1018_niv_10026_10021;0,0489050000000000
|
||||
1019_niv_10021_10022;-0,132800000000000
|
||||
1020_niv_10022_10023;0,123115000000000
|
||||
1021_niv_10023_10024;-0,0392450000000000
|
||||
1022_niv_10024_10025;0,111955000000000
|
||||
1023_niv_10025_10033;-0,560150000000000
|
||||
1024_niv_10033_10031;0,00535000000000000
|
||||
1025_niv_10031_10032;-0,134080000000000
|
||||
1026_niv_10032_10039;0,367790000000000
|
||||
1027_niv_10039_10038;-0,00793000000000000
|
||||
1028_niv_10038_10037;-0,0945250000000000
|
||||
1029_niv_10037_10040;0,172585000000000
|
||||
1030_niv_10040_10051;0,118465000000000
|
||||
1031_niv_10051_10050;-0,121385000000000
|
||||
1032_niv_10050_10053;0,111785000000000
|
||||
1033_niv_10053_10049;-0,280970000000000
|
||||
1034_niv_10049_812;-0,107090000000000
|
||||
1035_niv_812_10048;-0,100350000000000
|
||||
1036_niv_10048_816;0,0975800000000000
|
||||
1037_niv_816_812;0,00288000000000000
|
||||
1038_niv_812_10045;-0,168655000000000
|
||||
1039_niv_10045_10034;0,0163350000000000
|
||||
1040_niv_10034_10035;0,340615000000000
|
||||
1041_niv_10035_10036;-0,0740500000000000
|
||||
1042_niv_10036_10036;0,0
|
||||
1043_niv_10036_10035;0,0740700000000000
|
||||
1044_niv_10035_10034;-0,340510000000000
|
||||
1045_niv_10034_10028;0,0542000000000000
|
||||
1046_niv_10028_10011;0,0288950000000000
|
||||
1047_niv_10011_10001;0,0824600000000000
|
||||
1048_niv_10001_10003;-0,148740000000000
|
||||
1049_niv_10003_10007;0,169670000000000
|
||||
1050_niv_10007_10008;-0,184425000000000
|
||||
1051_niv_10008_10005;-0,0105250000000000
|
||||
1052_niv_10005_10006;-0,137375000000000
|
||||
1053_niv_10006_10004;-0,0892100000000000
|
||||
1054_niv_10004_10002;0,0985100000000000
|
||||
1055_niv_10002_10004;-0,0985100000000000
|
||||
1056_niv_10004_10006;0,0893750000000000
|
||||
1057_niv_10006_10005;0,137420000000000
|
||||
1058_niv_10005_10008;0,0105100000000000
|
||||
1059_niv_10008_10007;0,184390000000000
|
||||
1060_niv_10007_10003;-0,169750000000000
|
||||
1061_niv_10003_10001;0,148925000000000
|
||||
1062_niv_10001_10011;-0,0824850000000000
|
||||
1063_niv_10011_10028;-0,0289100000000000
|
||||
1064_niv_10028_10034;-0,0543150000000000
|
||||
1065_niv_10034_10045;-0,0162850000000000
|
||||
1066_niv_10045_812;0,168760000000000
|
||||
1067_niv_812_816;-0,00279500000000000
|
||||
1068_niv_666_10055;-0,327725000000000
|
||||
1069_niv_10055_10057;0,315220000000000
|
||||
1070_niv_10057_10059;0,265120000000000
|
||||
1071_niv_10059_10053;0,243275000000000
|
||||
1072_niv_10053_10044;0,270060000000000
|
||||
1073_niv_10044_10040;-0,378935000000000
|
||||
1074_niv_10040_10041;0,321375000000000
|
||||
1075_niv_10041_10042;-0,132085000000000
|
||||
1076_niv_10042_10027;-0,0240100000000000
|
||||
1077_niv_10027_10018;-0,0710750000000000
|
||||
1078_niv_10018_10009;-0,249365000000000
|
||||
1079_niv_10009_10006;-0,422070000000000
|
||||
1080_niv_10006_10009;0,421955000000000
|
||||
1081_niv_10009_10018;0,249120000000000
|
||||
1082_niv_10018_10027;0,0710100000000000
|
||||
1083_niv_10027_10042;0,0239700000000000
|
||||
1084_niv_10042_10041;0,132025000000000
|
||||
1085_niv_10041_10040;-0,321355000000000
|
||||
1086_niv_10040_10044;0,378885000000000
|
||||
1087_niv_10044_10053;-0,269960000000000
|
||||
1088_niv_10053_10059;-0,243415000000000
|
||||
1089_niv_10059_10057;-0,265175000000000
|
||||
1090_niv_10057_10055;-0,315140000000000
|
||||
1091_niv_10055_666;0,327665000000000
|
||||
lA_X10008;4,92367726504333
|
||||
lA_X10001;1,26605775157788
|
||||
lA_X10002;0,285210000063436
|
||||
lA_X10026;5,02925240821985
|
||||
lA_X10044;0,0261144531618106
|
||||
lA_X10037;5,23567561375748
|
||||
lA_X10059;5,00111373868016
|
||||
lA_X10028;5,54402940874687
|
||||
lA_X10014;4,82178070611008
|
||||
lA_X10054;4,79815140104357
|
||||
lA_Y10008;1,78207794196000
|
||||
lA_Y10001;0,728519252014788
|
||||
lA_Y10002;6,04764519363152
|
||||
lA_Y10026;4,54685933523997
|
||||
lA_Y10044;0,165678389410867
|
||||
lA_Y10037;5,31473326291553
|
||||
lA_Y10059;4,60902832087366
|
||||
lA_Y10028;0,567534691246054
|
||||
lA_Y10014;3,08492784517938
|
||||
lA_Y10054;3,23993836649799
|
||||
lA_Z10008;6,09405367015446
|
||||
lA_Z10001;5,98476482642249
|
||||
lA_Z10002;4,72722491383312
|
||||
lA_Z10026;6,17546480995430
|
||||
lA_Z10044;4,98789164099722
|
||||
lA_Z10037;4,02531984252138
|
||||
lA_Z10059;4,81608523482869
|
||||
lA_Z10028;0,0735417290281760
|
||||
lA_Z10014;3,86495602040732
|
||||
lA_Z10054;0,272279281988137
|
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -2834,3 +2834,179 @@ Beobachtungsvektor;
|
||||
69_gnssbx_GNA2_10044;X10044 - XGNA2
|
||||
69_gnssby_GNA2_10044;Y10044 - YGNA2
|
||||
69_gnssbz_GNA2_10044;Z10044 - ZGNA2
|
||||
946_niv_812_10047;NH10047 - NH812
|
||||
947_niv_10047_10046;NH10046 - NH10047
|
||||
948_niv_10046_10045;NH10045 - NH10046
|
||||
949_niv_10045_10034;NH10034 - NH10045
|
||||
950_niv_10034_10035;-NH10034 + NH10035
|
||||
951_niv_10035_10029;NH10029 - NH10035
|
||||
952_niv_10029_10030;-NH10029 + NH10030
|
||||
953_niv_10030_10031;-NH10030 + NH10031
|
||||
954_niv_10031_10017;NH10017 - NH10031
|
||||
955_niv_10017_10013;NH10013 - NH10017
|
||||
956_niv_10013_10012;NH10012 - NH10013
|
||||
957_niv_10012_10014;-NH10012 + NH10014
|
||||
958_niv_10014_10015;-NH10014 + NH10015
|
||||
959_niv_10015_10016;-NH10015 + NH10016
|
||||
960_niv_10016_10007;NH10007 - NH10016
|
||||
961_niv_10007_10007;0.0
|
||||
962_niv_10007_10016;-NH10007 + NH10016
|
||||
963_niv_10016_10015;NH10015 - NH10016
|
||||
964_niv_10015_10014;NH10014 - NH10015
|
||||
965_niv_10014_10012;NH10012 - NH10014
|
||||
966_niv_10012_10013;-NH10012 + NH10013
|
||||
967_niv_10013_10017;-NH10013 + NH10017
|
||||
968_niv_10017_10031;-NH10017 + NH10031
|
||||
969_niv_10031_10030;NH10030 - NH10031
|
||||
970_niv_10030_10029;NH10029 - NH10030
|
||||
971_niv_10029_10035;-NH10029 + NH10035
|
||||
972_niv_10035_10034;NH10034 - NH10035
|
||||
973_niv_10034_10045;-NH10034 + NH10045
|
||||
974_niv_10045_10046;-NH10045 + NH10046
|
||||
975_niv_10046_10047;-NH10046 + NH10047
|
||||
976_niv_10047_812;-NH10047 + NH812
|
||||
977_niv_666_10054;NH10054 - NH666
|
||||
978_niv_10054_10056;-NH10054 + NH10056
|
||||
979_niv_10056_10058;-NH10056 + NH10058
|
||||
980_niv_10058_10052;NH10052 - NH10058
|
||||
981_niv_10052_10043;NH10043 - NH10052
|
||||
982_niv_10043_10026;NH10026 - NH10043
|
||||
983_niv_10026_10010;NH10010 - NH10026
|
||||
984_niv_10010_10006;NH10006 - NH10010
|
||||
985_niv_10006_10010;-NH10006 + NH10010
|
||||
986_niv_10010_10026;-NH10010 + NH10026
|
||||
987_niv_10026_10043;-NH10026 + NH10043
|
||||
988_niv_10043_10052;-NH10043 + NH10052
|
||||
989_niv_10052_10058;-NH10052 + NH10058
|
||||
990_niv_10058_10056;NH10056 - NH10058
|
||||
991_niv_10056_10054;NH10054 - NH10056
|
||||
992_niv_10054_666;-NH10054 + NH666
|
||||
993_niv_816_10048;NH10048 - NH816
|
||||
994_niv_10048_812;-NH10048 + NH812
|
||||
995_niv_812_10049;NH10049 - NH812
|
||||
996_niv_10049_10053;-NH10049 + NH10053
|
||||
997_niv_10053_10050;NH10050 - NH10053
|
||||
998_niv_10050_10051;-NH10050 + NH10051
|
||||
999_niv_10051_10040;NH10040 - NH10051
|
||||
1000_niv_10040_10037;NH10037 - NH10040
|
||||
1001_niv_10037_10038;-NH10037 + NH10038
|
||||
1002_niv_10038_10039;-NH10038 + NH10039
|
||||
1003_niv_10039_10032;NH10032 - NH10039
|
||||
1004_niv_10032_10031;NH10031 - NH10032
|
||||
1005_niv_10031_10033;-NH10031 + NH10033
|
||||
1006_niv_10033_10025;NH10025 - NH10033
|
||||
1007_niv_10025_10024;NH10024 - NH10025
|
||||
1008_niv_10024_10023;NH10023 - NH10024
|
||||
1009_niv_10023_10022;NH10022 - NH10023
|
||||
1010_niv_10022_10021;NH10021 - NH10022
|
||||
1011_niv_10021_10026;-NH10021 + NH10026
|
||||
1012_niv_10026_10020;NH10020 - NH10026
|
||||
1013_niv_10020_10019;NH10019 - NH10020
|
||||
1014_niv_10019_10017;NH10017 - NH10019
|
||||
1015_niv_10017_10019;-NH10017 + NH10019
|
||||
1016_niv_10019_10020;-NH10019 + NH10020
|
||||
1017_niv_10020_10026;-NH10020 + NH10026
|
||||
1018_niv_10026_10021;NH10021 - NH10026
|
||||
1019_niv_10021_10022;-NH10021 + NH10022
|
||||
1020_niv_10022_10023;-NH10022 + NH10023
|
||||
1021_niv_10023_10024;-NH10023 + NH10024
|
||||
1022_niv_10024_10025;-NH10024 + NH10025
|
||||
1023_niv_10025_10033;-NH10025 + NH10033
|
||||
1024_niv_10033_10031;NH10031 - NH10033
|
||||
1025_niv_10031_10032;-NH10031 + NH10032
|
||||
1026_niv_10032_10039;-NH10032 + NH10039
|
||||
1027_niv_10039_10038;NH10038 - NH10039
|
||||
1028_niv_10038_10037;NH10037 - NH10038
|
||||
1029_niv_10037_10040;-NH10037 + NH10040
|
||||
1030_niv_10040_10051;-NH10040 + NH10051
|
||||
1031_niv_10051_10050;NH10050 - NH10051
|
||||
1032_niv_10050_10053;-NH10050 + NH10053
|
||||
1033_niv_10053_10049;NH10049 - NH10053
|
||||
1034_niv_10049_812;-NH10049 + NH812
|
||||
1035_niv_812_10048;NH10048 - NH812
|
||||
1036_niv_10048_816;-NH10048 + NH816
|
||||
1037_niv_816_812;NH812 - NH816
|
||||
1038_niv_812_10045;NH10045 - NH812
|
||||
1039_niv_10045_10034;NH10034 - NH10045
|
||||
1040_niv_10034_10035;-NH10034 + NH10035
|
||||
1041_niv_10035_10036;-NH10035 + NH10036
|
||||
1042_niv_10036_10036;0.0
|
||||
1043_niv_10036_10035;NH10035 - NH10036
|
||||
1044_niv_10035_10034;NH10034 - NH10035
|
||||
1045_niv_10034_10028;NH10028 - NH10034
|
||||
1046_niv_10028_10011;NH10011 - NH10028
|
||||
1047_niv_10011_10001;NH10001 - NH10011
|
||||
1048_niv_10001_10003;-NH10001 + NH10003
|
||||
1049_niv_10003_10007;-NH10003 + NH10007
|
||||
1050_niv_10007_10008;-NH10007 + NH10008
|
||||
1051_niv_10008_10005;NH10005 - NH10008
|
||||
1052_niv_10005_10006;-NH10005 + NH10006
|
||||
1053_niv_10006_10004;NH10004 - NH10006
|
||||
1054_niv_10004_10002;NH10002 - NH10004
|
||||
1055_niv_10002_10004;-NH10002 + NH10004
|
||||
1056_niv_10004_10006;-NH10004 + NH10006
|
||||
1057_niv_10006_10005;NH10005 - NH10006
|
||||
1058_niv_10005_10008;-NH10005 + NH10008
|
||||
1059_niv_10008_10007;NH10007 - NH10008
|
||||
1060_niv_10007_10003;NH10003 - NH10007
|
||||
1061_niv_10003_10001;NH10001 - NH10003
|
||||
1062_niv_10001_10011;-NH10001 + NH10011
|
||||
1063_niv_10011_10028;-NH10011 + NH10028
|
||||
1064_niv_10028_10034;-NH10028 + NH10034
|
||||
1065_niv_10034_10045;-NH10034 + NH10045
|
||||
1066_niv_10045_812;-NH10045 + NH812
|
||||
1067_niv_812_816;-NH812 + NH816
|
||||
1068_niv_666_10055;NH10055 - NH666
|
||||
1069_niv_10055_10057;-NH10055 + NH10057
|
||||
1070_niv_10057_10059;-NH10057 + NH10059
|
||||
1071_niv_10059_10053;NH10053 - NH10059
|
||||
1072_niv_10053_10044;NH10044 - NH10053
|
||||
1073_niv_10044_10040;NH10040 - NH10044
|
||||
1074_niv_10040_10041;-NH10040 + NH10041
|
||||
1075_niv_10041_10042;-NH10041 + NH10042
|
||||
1076_niv_10042_10027;NH10027 - NH10042
|
||||
1077_niv_10027_10018;NH10018 - NH10027
|
||||
1078_niv_10018_10009;NH10009 - NH10018
|
||||
1079_niv_10009_10006;NH10006 - NH10009
|
||||
1080_niv_10006_10009;-NH10006 + NH10009
|
||||
1081_niv_10009_10018;-NH10009 + NH10018
|
||||
1082_niv_10018_10027;-NH10018 + NH10027
|
||||
1083_niv_10027_10042;-NH10027 + NH10042
|
||||
1084_niv_10042_10041;NH10041 - NH10042
|
||||
1085_niv_10041_10040;NH10040 - NH10041
|
||||
1086_niv_10040_10044;-NH10040 + NH10044
|
||||
1087_niv_10044_10053;-NH10044 + NH10053
|
||||
1088_niv_10053_10059;-NH10053 + NH10059
|
||||
1089_niv_10059_10057;NH10057 - NH10059
|
||||
1090_niv_10057_10055;NH10055 - NH10057
|
||||
1091_niv_10055_666;-NH10055 + NH666
|
||||
lA_X10008;X10008
|
||||
lA_X10001;X10001
|
||||
lA_X10002;X10002
|
||||
lA_X10026;X10026
|
||||
lA_X10044;X10044
|
||||
lA_X10037;X10037
|
||||
lA_X10059;X10059
|
||||
lA_X10028;X10028
|
||||
lA_X10014;X10014
|
||||
lA_X10054;X10054
|
||||
lA_Y10008;Y10008
|
||||
lA_Y10001;Y10001
|
||||
lA_Y10002;Y10002
|
||||
lA_Y10026;Y10026
|
||||
lA_Y10044;Y10044
|
||||
lA_Y10037;Y10037
|
||||
lA_Y10059;Y10059
|
||||
lA_Y10028;Y10028
|
||||
lA_Y10014;Y10014
|
||||
lA_Y10054;Y10054
|
||||
lA_Z10008;Z10008
|
||||
lA_Z10001;Z10001
|
||||
lA_Z10002;Z10002
|
||||
lA_Z10026;Z10026
|
||||
lA_Z10044;Z10044
|
||||
lA_Z10037;Z10037
|
||||
lA_Z10059;Z10059
|
||||
lA_Z10028;Z10028
|
||||
lA_Z10014;Z10014
|
||||
lA_Z10054;Z10054
|
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user