Compare commits: 4db713b2da...main (38 commits)
| SHA1 |
|---|
| ae38545212 |
| cd0d60093e |
| 425e55f2a5 |
| 67c2540c26 |
| fe5e69dcf3 |
| 1654155f53 |
| b652f432b5 |
| ac03826d3e |
| a31d101576 |
| cfc5d60bb9 |
| 787ad56a03 |
| 6428e15757 |
| b5f5161b3d |
| 3accf4cf74 |
| 2519033f81 |
| e21d39844b |
| 90930d3619 |
| 44f31b8fb0 |
| feb33a5c93 |
| c38d8b3572 |
| 1efd1599f3 |
| 656c535208 |
| 5f23c68d84 |
| b4c1c0a5c8 |
| cac12f6569 |
| 546cfe9494 |
| d1a46f8db1 |
| 3aacff9a49 |
| 1cd357e5bc |
| de829f5e5b |
| 68809a1319 |
| 55c314dc0c |
| 7fc47363af |
| 8e999c80cc |
| 9d45b50b83 |
| d078147386 |
| 922b0fe516 |
| 669db5373f |
.idea/Masterprojekt-Campusnetz.iml (generated, 2 changed lines)
@@ -4,7 +4,7 @@
  <content url="file://$MODULE_DIR$">
    <excludeFolder url="file://$MODULE_DIR$/.venv" />
  </content>
  <orderEntry type="jdk" jdkName="Python 3.14" jdkType="Python SDK" />
  <orderEntry type="jdk" jdkName="Python 3.14 (Masterprojekt)" jdkType="Python SDK" />
  <orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
.idea/dataSources.local.xml (generated, 26 changed lines)
@@ -14,5 +14,31 @@
      </introspection-scope>
    </schema-mapping>
  </data-source>
  <data-source name="Campusnetz.db [2]" uuid="1e70ba7f-d1a3-44cb-96d4-ec35638383ed">
    <database-info product="SQLite" version="3.45.1" jdbc-version="4.2" driver-name="SQLite JDBC" driver-version="3.45.1.0" dbms="SQLITE" exact-version="3.45.1" exact-driver-version="3.45">
      <identifier-quote-string>"</identifier-quote-string>
    </database-info>
    <case-sensitivity plain-identifiers="mixed" quoted-identifiers="mixed" />
    <secret-storage>master_key</secret-storage>
    <auth-provider>no-auth</auth-provider>
    <schema-mapping>
      <introspection-scope>
        <node kind="schema" qname="@" />
      </introspection-scope>
    </schema-mapping>
  </data-source>
  <data-source name="Campusnetz.db [3]" uuid="bf3bfb54-ef6e-4a90-8cb8-69f52581bab9">
    <database-info product="SQLite" version="3.45.1" jdbc-version="4.2" driver-name="SQLite JDBC" driver-version="3.45.1.0" dbms="SQLITE" exact-version="3.45.1" exact-driver-version="3.45">
      <identifier-quote-string>"</identifier-quote-string>
    </database-info>
    <case-sensitivity plain-identifiers="mixed" quoted-identifiers="mixed" />
    <secret-storage>master_key</secret-storage>
    <auth-provider>no-auth</auth-provider>
    <schema-mapping>
      <introspection-scope>
        <node kind="schema" qname="@" />
      </introspection-scope>
    </schema-mapping>
  </data-source>
</component>
</project>
.idea/dataSources.xml (generated, 14 changed lines)
@@ -8,5 +8,19 @@
      <jdbc-url>jdbc:sqlite:$PROJECT_DIR$/Campusnetz.db</jdbc-url>
      <working-dir>$ProjectFileDir$</working-dir>
    </data-source>
    <data-source source="LOCAL" name="Campusnetz.db [2]" uuid="1e70ba7f-d1a3-44cb-96d4-ec35638383ed">
      <driver-ref>sqlite.xerial</driver-ref>
      <synchronize>true</synchronize>
      <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
      <jdbc-url>jdbc:sqlite:$PROJECT_DIR$/Campusnetz.db</jdbc-url>
      <working-dir>$ProjectFileDir$</working-dir>
    </data-source>
    <data-source source="LOCAL" name="Campusnetz.db [3]" uuid="bf3bfb54-ef6e-4a90-8cb8-69f52581bab9">
      <driver-ref>sqlite.xerial</driver-ref>
      <synchronize>true</synchronize>
      <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
      <jdbc-url>jdbc:sqlite:$PROJECT_DIR$/Campusnetz.db</jdbc-url>
      <working-dir>$ProjectFileDir$</working-dir>
    </data-source>
  </component>
</project>
.idea/dataSources/1e70ba7f-d1a3-44cb-96d4-ec35638383ed.corrupted.20251216-091841.reason.txt (generated, new file, 70 lines)
@@ -0,0 +1,70 @@
java.lang.IllegalStateException: Storage for [C:\Users\fabia\AppData\Local\JetBrains\PyCharm2025.2\data-source\e067176d\1e70ba7f-d1a3-44cb-96d4-ec35638383ed\entities\entities.dat] is already registered
    at com.intellij.util.io.FilePageCache.registerPagedFileStorage(FilePageCache.java:411)
    at com.intellij.util.io.PagedFileStorage.<init>(PagedFileStorage.java:74)
    at com.intellij.util.io.ResizeableMappedFile.<init>(ResizeableMappedFile.java:71)
    at com.intellij.util.io.PersistentBTreeEnumerator.<init>(PersistentBTreeEnumerator.java:130)
    at com.intellij.util.io.PersistentEnumerator.createDefaultEnumerator(PersistentEnumerator.java:53)
    at com.intellij.util.io.PersistentMapImpl.<init>(PersistentMapImpl.java:166)
    at com.intellij.util.io.PersistentMapImpl.<init>(PersistentMapImpl.java:141)
    at com.intellij.util.io.PersistentMapBuilder.buildImplementation(PersistentMapBuilder.java:91)
    at com.intellij.util.io.PersistentMapBuilder.build(PersistentMapBuilder.java:74)
    at com.intellij.database.model.ModelEntityStoragePersistentBackend.opened(ModelEntityStoragePersistentBackend.kt:136)
    at com.intellij.database.model.ModelEntityStoragePersistentBackend.<init>(ModelEntityStoragePersistentBackend.kt:86)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1$create$1.invoke(ModelEntityStoragePersistentBackend.kt:45)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1$create$1.invoke(ModelEntityStoragePersistentBackend.kt:45)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1.create$lambda$0(ModelEntityStoragePersistentBackend.kt:45)
    at java.base/java.util.concurrent.ConcurrentMap.computeIfAbsent(ConcurrentMap.java:330)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1.create(ModelEntityStoragePersistentBackend.kt:45)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt.create(ModelEntityStoragePersistentBackend.kt:76)
    at com.intellij.database.dataSource.srcStorage.DbSrcModelStorage.createBackend(DbSrcModelStorage.java:289)
    at com.intellij.database.dataSource.srcStorage.DbSrcModelStorage.createPersistentBackend(DbSrcModelStorage.java:280)
    at com.intellij.database.dataSource.srcStorage.DbSrcModelStorage.createEntityStorage(DbSrcModelStorage.java:260)
    at com.intellij.database.model.BaseModel.<init>(BaseModel.java:101)
    at com.intellij.database.dialects.sqlite.model.SqliteImplModel.<init>(SqliteImplModel.java:40)
    at com.intellij.database.model.meta.BasicMetaModel.newModel(BasicMetaModel.java:56)
    at com.intellij.database.model.ModelFacade.createModel(ModelFacade.java:28)
    at com.intellij.database.model.ModelFactory.createModel(ModelFactory.kt:22)
    at com.intellij.database.model.serialization.ModelImporter.deserializeFast(ModelImporter.java:91)
    at com.intellij.database.model.serialization.ModelImporter.deserialize(ModelImporter.java:68)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl$Companion.readModel(DataSourceModelStorageImpl.kt:605)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl$Companion.readModel(DataSourceModelStorageImpl.kt:588)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.readModel(DataSourceModelStorageImpl.kt:373)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.loadModels(DataSourceModelStorageImpl.kt:262)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.readStateHeavy(DataSourceModelStorageImpl.kt:161)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.continueLoadingWhenInitialized$lambda$8$lambda$7$lambda$6$lambda$5(DataSourceModelStorageImpl.kt:144)
    at com.intellij.openapi.progress.util.BackgroundTaskUtil.lambda$runUnderDisposeAwareIndicator$15(BackgroundTaskUtil.java:371)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$runProcess$1(CoreProgressManager.java:229)
    at com.intellij.platform.diagnostic.telemetry.helpers.TraceKt.use(trace.kt:44)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$runProcess$2(CoreProgressManager.java:228)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$executeProcessUnderProgress$14(CoreProgressManager.java:681)
    at com.intellij.openapi.progress.impl.CoreProgressManager.registerIndicatorAndRun(CoreProgressManager.java:756)
    at com.intellij.openapi.progress.impl.CoreProgressManager.computeUnderProgress(CoreProgressManager.java:712)
    at com.intellij.openapi.progress.impl.CoreProgressManager.executeProcessUnderProgress(CoreProgressManager.java:680)
    at com.intellij.openapi.progress.impl.ProgressManagerImpl.executeProcessUnderProgress(ProgressManagerImpl.java:78)
    at com.intellij.openapi.progress.impl.CoreProgressManager.runProcess(CoreProgressManager.java:209)
    at com.intellij.openapi.progress.util.BackgroundTaskUtil.runUnderDisposeAwareIndicator(BackgroundTaskUtil.java:366)
    at com.intellij.openapi.progress.util.BackgroundTaskUtil.runUnderDisposeAwareIndicator(BackgroundTaskUtil.java:349)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.continueLoadingWhenInitialized$lambda$8$lambda$7$lambda$6(DataSourceModelStorageImpl.kt:143)
    at com.intellij.database.util.AsyncTask$Frame$compute$$inlined$supply$1$1.run(AsyncTask.kt:878)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$executeProcessUnderProgress$14(CoreProgressManager.java:681)
    at com.intellij.openapi.progress.impl.CoreProgressManager.registerIndicatorAndRun(CoreProgressManager.java:756)
    at com.intellij.openapi.progress.impl.CoreProgressManager.computeUnderProgress(CoreProgressManager.java:712)
    at com.intellij.openapi.progress.impl.CoreProgressManager.executeProcessUnderProgress(CoreProgressManager.java:680)
    at com.intellij.openapi.progress.impl.ProgressManagerImpl.executeProcessUnderProgress(ProgressManagerImpl.java:78)
    at com.intellij.database.util.AsyncTask$Frame$compute$$inlined$supply$1.get(AsyncTask.kt:903)
    at java.base/java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1768)
    at com.intellij.util.concurrency.ChildContext$runInChildContext$1.invoke(propagation.kt:167)
    at com.intellij.util.concurrency.ChildContext$runInChildContext$1.invoke(propagation.kt:167)
    at com.intellij.util.concurrency.ChildContext.runInChildContext(propagation.kt:173)
    at com.intellij.util.concurrency.ChildContext.runInChildContext(propagation.kt:167)
    at com.intellij.util.concurrency.ContextRunnable.run(ContextRunnable.java:27)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl$continueLoadingWhenInitialized$2$1$1$1$1.invokeSuspend(DataSourceModelStorageImpl.kt:142)
    at kotlin.coroutines.jvm.internal.BaseContinuationImpl.resumeWith(ContinuationImpl.kt:33)
    at kotlinx.coroutines.DispatchedTask.run(DispatchedTask.kt:100)
    at kotlinx.coroutines.scheduling.CoroutineScheduler.runSafely(CoroutineScheduler.kt:613)
    at kotlinx.coroutines.scheduling.CoroutineScheduler$Worker.executeTask(CoroutineScheduler.kt:1189)
    at kotlinx.coroutines.scheduling.CoroutineScheduler$Worker.runWorker(CoroutineScheduler.kt:778)
    at kotlinx.coroutines.scheduling.CoroutineScheduler$Worker.run(CoroutineScheduler.kt:765)
    Suppressed: java.lang.Exception: Storage[C:\Users\fabia\AppData\Local\JetBrains\PyCharm2025.2\data-source\e067176d\1e70ba7f-d1a3-44cb-96d4-ec35638383ed\entities\entities.dat] registration stack trace
        at com.intellij.util.io.FilePageCache.registerPagedFileStorage(FilePageCache.java:438)
        ... 65 more
.idea/dataSources/1e70ba7f-d1a3-44cb-96d4-ec35638383ed.corrupted.20251216-091841.xml (generated, new file, 1714 lines; file diff suppressed because it is too large)
.idea/dataSources/1e70ba7f-d1a3-44cb-96d4-ec35638383ed.xml (generated, new file, 1714 lines; file diff suppressed because it is too large)
.idea/dataSources/1e70ba7f-d1a3-44cb-96d4-ec35638383ed/storage_v2/_src_/schema/main.uQUzAA.meta (generated, new file, 2 lines)
@@ -0,0 +1,2 @@
#n:main
!<md> [0, 0, null, null, -2147483648, -2147483648]
.idea/dataSources/bf3bfb54-ef6e-4a90-8cb8-69f52581bab9.corrupted.20251218-033406.reason.txt (generated, new file, 70 lines)
@@ -0,0 +1,70 @@
java.lang.IllegalStateException: Storage for [C:\Users\fabia\AppData\Local\JetBrains\PyCharm2025.2\data-source\e067176d\bf3bfb54-ef6e-4a90-8cb8-69f52581bab9\entities\entities.dat] is already registered
    at com.intellij.util.io.FilePageCache.registerPagedFileStorage(FilePageCache.java:411)
    at com.intellij.util.io.PagedFileStorage.<init>(PagedFileStorage.java:74)
    at com.intellij.util.io.ResizeableMappedFile.<init>(ResizeableMappedFile.java:71)
    at com.intellij.util.io.PersistentBTreeEnumerator.<init>(PersistentBTreeEnumerator.java:130)
    at com.intellij.util.io.PersistentEnumerator.createDefaultEnumerator(PersistentEnumerator.java:53)
    at com.intellij.util.io.PersistentMapImpl.<init>(PersistentMapImpl.java:166)
    at com.intellij.util.io.PersistentMapImpl.<init>(PersistentMapImpl.java:141)
    at com.intellij.util.io.PersistentMapBuilder.buildImplementation(PersistentMapBuilder.java:91)
    at com.intellij.util.io.PersistentMapBuilder.build(PersistentMapBuilder.java:74)
    at com.intellij.database.model.ModelEntityStoragePersistentBackend.opened(ModelEntityStoragePersistentBackend.kt:136)
    at com.intellij.database.model.ModelEntityStoragePersistentBackend.<init>(ModelEntityStoragePersistentBackend.kt:86)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1$create$1.invoke(ModelEntityStoragePersistentBackend.kt:45)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1$create$1.invoke(ModelEntityStoragePersistentBackend.kt:45)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1.create$lambda$0(ModelEntityStoragePersistentBackend.kt:45)
    at java.base/java.util.concurrent.ConcurrentMap.computeIfAbsent(ConcurrentMap.java:330)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt$ourFlusher$1.create(ModelEntityStoragePersistentBackend.kt:45)
    at com.intellij.database.model.ModelEntityStoragePersistentBackendKt.create(ModelEntityStoragePersistentBackend.kt:76)
    at com.intellij.database.dataSource.srcStorage.DbSrcModelStorage.createBackend(DbSrcModelStorage.java:289)
    at com.intellij.database.dataSource.srcStorage.DbSrcModelStorage.createPersistentBackend(DbSrcModelStorage.java:280)
    at com.intellij.database.dataSource.srcStorage.DbSrcModelStorage.createEntityStorage(DbSrcModelStorage.java:260)
    at com.intellij.database.model.BaseModel.<init>(BaseModel.java:101)
    at com.intellij.database.dialects.sqlite.model.SqliteImplModel.<init>(SqliteImplModel.java:40)
    at com.intellij.database.model.meta.BasicMetaModel.newModel(BasicMetaModel.java:56)
    at com.intellij.database.model.ModelFacade.createModel(ModelFacade.java:28)
    at com.intellij.database.model.ModelFactory.createModel(ModelFactory.kt:22)
    at com.intellij.database.model.serialization.ModelImporter.deserializeFast(ModelImporter.java:91)
    at com.intellij.database.model.serialization.ModelImporter.deserialize(ModelImporter.java:68)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl$Companion.readModel(DataSourceModelStorageImpl.kt:605)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl$Companion.readModel(DataSourceModelStorageImpl.kt:588)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.readModel(DataSourceModelStorageImpl.kt:373)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.loadModels(DataSourceModelStorageImpl.kt:262)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.readStateHeavy(DataSourceModelStorageImpl.kt:161)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.continueLoadingWhenInitialized$lambda$8$lambda$7$lambda$6$lambda$5(DataSourceModelStorageImpl.kt:144)
    at com.intellij.openapi.progress.util.BackgroundTaskUtil.lambda$runUnderDisposeAwareIndicator$15(BackgroundTaskUtil.java:371)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$runProcess$1(CoreProgressManager.java:229)
    at com.intellij.platform.diagnostic.telemetry.helpers.TraceKt.use(trace.kt:44)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$runProcess$2(CoreProgressManager.java:228)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$executeProcessUnderProgress$14(CoreProgressManager.java:681)
    at com.intellij.openapi.progress.impl.CoreProgressManager.registerIndicatorAndRun(CoreProgressManager.java:756)
    at com.intellij.openapi.progress.impl.CoreProgressManager.computeUnderProgress(CoreProgressManager.java:712)
    at com.intellij.openapi.progress.impl.CoreProgressManager.executeProcessUnderProgress(CoreProgressManager.java:680)
    at com.intellij.openapi.progress.impl.ProgressManagerImpl.executeProcessUnderProgress(ProgressManagerImpl.java:78)
    at com.intellij.openapi.progress.impl.CoreProgressManager.runProcess(CoreProgressManager.java:209)
    at com.intellij.openapi.progress.util.BackgroundTaskUtil.runUnderDisposeAwareIndicator(BackgroundTaskUtil.java:366)
    at com.intellij.openapi.progress.util.BackgroundTaskUtil.runUnderDisposeAwareIndicator(BackgroundTaskUtil.java:349)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl.continueLoadingWhenInitialized$lambda$8$lambda$7$lambda$6(DataSourceModelStorageImpl.kt:143)
    at com.intellij.database.util.AsyncTask$Frame$compute$$inlined$supply$1$1.run(AsyncTask.kt:878)
    at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$executeProcessUnderProgress$14(CoreProgressManager.java:681)
    at com.intellij.openapi.progress.impl.CoreProgressManager.registerIndicatorAndRun(CoreProgressManager.java:756)
    at com.intellij.openapi.progress.impl.CoreProgressManager.computeUnderProgress(CoreProgressManager.java:712)
    at com.intellij.openapi.progress.impl.CoreProgressManager.executeProcessUnderProgress(CoreProgressManager.java:680)
    at com.intellij.openapi.progress.impl.ProgressManagerImpl.executeProcessUnderProgress(ProgressManagerImpl.java:78)
    at com.intellij.database.util.AsyncTask$Frame$compute$$inlined$supply$1.get(AsyncTask.kt:903)
    at java.base/java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1768)
    at com.intellij.util.concurrency.ChildContext$runInChildContext$1.invoke(propagation.kt:167)
    at com.intellij.util.concurrency.ChildContext$runInChildContext$1.invoke(propagation.kt:167)
    at com.intellij.util.concurrency.ChildContext.runInChildContext(propagation.kt:173)
    at com.intellij.util.concurrency.ChildContext.runInChildContext(propagation.kt:167)
    at com.intellij.util.concurrency.ContextRunnable.run(ContextRunnable.java:27)
    at com.intellij.database.dataSource.DataSourceModelStorageImpl$continueLoadingWhenInitialized$2$1$1$1$1.invokeSuspend(DataSourceModelStorageImpl.kt:142)
    at kotlin.coroutines.jvm.internal.BaseContinuationImpl.resumeWith(ContinuationImpl.kt:33)
    at kotlinx.coroutines.DispatchedTask.run(DispatchedTask.kt:100)
    at kotlinx.coroutines.scheduling.CoroutineScheduler.runSafely(CoroutineScheduler.kt:613)
    at kotlinx.coroutines.scheduling.CoroutineScheduler$Worker.executeTask(CoroutineScheduler.kt:1189)
    at kotlinx.coroutines.scheduling.CoroutineScheduler$Worker.runWorker(CoroutineScheduler.kt:778)
    at kotlinx.coroutines.scheduling.CoroutineScheduler$Worker.run(CoroutineScheduler.kt:765)
    Suppressed: java.lang.Exception: Storage[C:\Users\fabia\AppData\Local\JetBrains\PyCharm2025.2\data-source\e067176d\bf3bfb54-ef6e-4a90-8cb8-69f52581bab9\entities\entities.dat] registration stack trace
        at com.intellij.util.io.FilePageCache.registerPagedFileStorage(FilePageCache.java:438)
        ... 65 more
.idea/dataSources/bf3bfb54-ef6e-4a90-8cb8-69f52581bab9.corrupted.20251218-033406.xml (generated, new file, 1714 lines; file diff suppressed because it is too large)
.idea/dataSources/bf3bfb54-ef6e-4a90-8cb8-69f52581bab9.xml (generated, new file, 1714 lines; file diff suppressed because it is too large)
.idea/dataSources/bf3bfb54-ef6e-4a90-8cb8-69f52581bab9/storage_v2/_src_/schema/main.uQUzAA.meta (generated, new file, 2 lines)
@@ -0,0 +1,2 @@
#n:main
!<md> [0, 0, null, null, -2147483648, -2147483648]
.idea/dataSources/c385b2f5-c801-4154-bc11-62182a8396b3.xml (generated, 2553 changed lines; file diff suppressed because it is too large)
.idea/misc.xml (generated, 2 changed lines)
@@ -3,5 +3,5 @@
  <component name="Black">
    <option name="sdkName" value="Python 3.14" />
  </component>
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.14" project-jdk-type="Python SDK" />
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.14 (Masterprojekt)" project-jdk-type="Python SDK" />
</project>
Berechnungen.py (new file, 71 lines)
@@ -0,0 +1,71 @@
import sympy as sp
from decimal import Decimal
import math


class Berechnungen:
    def __init__(self, a, b):
        self.a_wert = a
        self.b_wert = b
        self.e_quadrat_wert = self.e_quadrat()
        self.e_strich_quadrat_wert = self.e_strich_quadrat()

    def e_quadrat(self):
        return (self.a_wert**2 - self.b_wert**2) / self.a_wert**2

    def e_strich_quadrat(self):
        return (self.a_wert**2 - self.b_wert**2) / self.b_wert**2

    def P(self, x, y):
        return sp.sqrt(x**2 + y**2)

    def hilfswinkel(self, z, x, y):
        hw = sp.atan2(z * self.a_wert, self.P(x, y) * self.b_wert)
        return hw

    def B(self, z, x, y):
        hilfswinkel = self.hilfswinkel(z, x, y)
        B = sp.atan2((z + self.e_strich_quadrat_wert * self.b_wert * sp.sin(hilfswinkel) ** 3), (self.P(x, y) - self.e_quadrat_wert * self.a_wert * sp.cos(hilfswinkel) ** 3))
        return B

    def L(self, x, y):
        return sp.atan2(y, x)

    def H(self, x, y, z):
        B = self.B(z, x, y)
        H = (self.P(x, y) / sp.cos(B)) - self.a_wert / (sp.sqrt(1 - self.e_quadrat_wert * sp.sin(B) ** 2))
        return H

    def geometrische_breite_laenge(self, dict_koordinaten):
        for punktnummer, matrix in dict_koordinaten.items():
            dict_koordinaten[punktnummer] = [matrix, self.B(matrix[2], matrix[0], matrix[1]), self.L(matrix[0], matrix[1])]
        return dict_koordinaten


class Einheitenumrechnung:
    def __init__(self):
        pass

    def mas_to_rad(mas):
        umrechnungsfaktor = 1 / 1000 * 1 / 3600 * sp.pi / 180
        grad = mas * umrechnungsfaktor
        return grad

    def mm_to_m(mm):
        m = mm / 1000
        return m

    def ppb(ppb):
        ppb *= 10 ** (-9)
        return ppb

    def gon_to_rad_Decimal(gon):
        gon = Decimal(gon)
        pi = Decimal(str(math.pi))
        rad = (gon / Decimal(200)) * pi
        return rad

    def mgon_to_rad_Decimal(gon):
        gon = Decimal(gon)
        pi = Decimal(str(math.pi))
        rad = (gon / Decimal(200000)) * pi
        return rad
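For orientation, Berechnungen implements the classic Cartesian-to-ellipsoidal conversion with an auxiliary angle: e2 = (a^2 - b^2)/a^2, e'2 = (a^2 - b^2)/b^2, theta = atan2(Z*a, P*b) with P = sqrt(X^2 + Y^2), then B = atan2(Z + e'2*b*sin^3(theta), P - e2*a*cos^3(theta)), L = atan2(Y, X) and H = P/cos(B) - a/sqrt(1 - e2*sin^2(B)). The sketch below is not part of the diff; it only shows how the class can be driven with GRS80 parameters (a = 6378137.0 m, b ≈ 6356752.3141 m; the value b = 63567552.314 hard-coded later in the notebook looks like a digit slip of this number) and one ECEF point taken from the notebook output. The Einheitenumrechnung helpers are defined without self or @staticmethod, so they only work when called on the class itself.

```python
# Usage sketch only, not part of the diff. GRS80 values are assumed here; the
# notebook defines its own a and b before building the functional model.
import math
from Berechnungen import Berechnungen, Einheitenumrechnung

a = 6378137.0        # GRS80 semi-major axis [m]
b = 6356752.3141     # GRS80 semi-minor axis [m]

ber = Berechnungen(a, b)

# One ECEF point from the notebook output (point 10009, naeherung_us):
x, y, z = 3794767.4719546097, 546740.0869962516, 5080165.952124462

B = ber.B(z, x, y)   # ellipsoidal latitude [rad], via the auxiliary angle
L = ber.L(x, y)      # ellipsoidal longitude [rad]
H = ber.H(x, y, z)   # ellipsoidal height [m]
print(float(B) * 180.0 / math.pi, float(L) * 180.0 / math.pi, float(H))

# The unit helpers have no self/@staticmethod, so call them on the class:
print(Einheitenumrechnung.mgon_to_rad_Decimal("0.15"))  # 0.15 mgon as Decimal radians
```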
Campusnetz.db (binary file not shown)
Campusnetz.ipynb (774 changed lines)
@@ -4,7 +4,11 @@
"cell_type": "code",
"id": "initial_id",
"metadata": {
"collapsed": true
"collapsed": true,
"ExecuteTime": {
"end_time": "2025-12-23T08:39:33.840440Z",
"start_time": "2025-12-23T08:39:33.100346Z"
}
},
"source": [
"# All Python modules used are imported here\n",
@@ -13,13 +17,25 @@
"import importlib\n",
"import Koordinatentransformationen\n",
"import sqlite3\n",
"import Funktionales_Modell"
"import Funktionales_Modell\n",
"import Berechnungen\n",
"import Parameterschaetzung\n",
"import Stochastisches_Modell\n",
"from Stochastisches_Modell import StochastischesModell\n",
"import Export\n",
"import Netzqualität_Genauigkeit\n",
"import Datumsfestlegung"
],
"outputs": [],
"execution_count": null
"execution_count": 1
},
{
"metadata": {},
"metadata": {
"ExecuteTime": {
"end_time": "2025-12-23T08:39:34.656550Z",
"start_time": "2025-12-23T08:39:34.647503Z"
}
},
"cell_type": "code",
"source": [
"importlib.reload(Datenbank)\n",
@@ -34,10 +50,15 @@
],
"id": "82d514cd426db78b",
"outputs": [],
"execution_count": null
"execution_count": 2
},
{
"metadata": {},
"metadata": {
"ExecuteTime": {
"end_time": "2025-12-23T08:39:35.391343Z",
"start_time": "2025-12-23T08:39:35.385408Z"
}
},
"cell_type": "code",
"source": [
"# Import of the coordinate file(s) from the tachymeter\n",
@@ -45,11 +66,24 @@
"imp.import_koordinaten_lh_tachymeter(pfad_datei)"
],
"id": "d3bce3991a8962dc",
"outputs": [],
"execution_count": null
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The import was aborted because at least some of the point numbers from the file Daten\\campsnetz_koordinaten_bereinigt.csv already exist in the database. Please change them in the file and repeat the import.\n"
]
}
],
"execution_count": 3
},
{
"metadata": {},
"metadata": {
"ExecuteTime": {
"end_time": "2025-12-23T08:39:35.987063Z",
"start_time": "2025-12-23T08:39:35.973195Z"
}
},
"cell_type": "code",
"source": [
"importlib.reload(Datenbank)\n",
@@ -58,11 +92,201 @@
"print(db_zugriff.get_koordinaten(\"naeherung_lh\"))"
],
"id": "196ff0c8f8b5aea1",
"outputs": [],
"execution_count": null
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'10009': Matrix([\n",
|
||||
"[1000.0],\n",
|
||||
"[2000.0],\n",
|
||||
"[ 100.0]]), '10006': Matrix([\n",
|
||||
"[ 1000.0],\n",
|
||||
"[2032.6863],\n",
|
||||
"[ 99.5825]]), '10010': Matrix([\n",
|
||||
"[1011.8143],\n",
|
||||
"[1973.3252],\n",
|
||||
"[ 99.9259]]), '10018': Matrix([\n",
|
||||
"[1008.5759],\n",
|
||||
"[ 1942.762],\n",
|
||||
"[ 100.2553]]), '10008': Matrix([\n",
|
||||
"[979.7022],\n",
|
||||
"[1991.401],\n",
|
||||
"[ 99.732]]), '10005': Matrix([\n",
|
||||
"[ 966.5154],\n",
|
||||
"[2014.6496],\n",
|
||||
"[ 99.72]]), '10003': Matrix([\n",
|
||||
"[ 908.4312],\n",
|
||||
"[1996.1248],\n",
|
||||
"[ 99.7403]]), '10004': Matrix([\n",
|
||||
"[ 954.1536],\n",
|
||||
"[2021.6822],\n",
|
||||
"[ 99.4916]]), '10007': Matrix([\n",
|
||||
"[ 921.7481],\n",
|
||||
"[1973.6201],\n",
|
||||
"[ 99.9176]]), '10001': Matrix([\n",
|
||||
"[ 833.9439],\n",
|
||||
"[1978.3737],\n",
|
||||
"[ 99.8946]]), '10002': Matrix([\n",
|
||||
"[ 875.9684],\n",
|
||||
"[1998.5174],\n",
|
||||
"[ 99.5867]]), '10016': Matrix([\n",
|
||||
"[ 928.2783],\n",
|
||||
"[1944.0082],\n",
|
||||
"[ 100.0459]]), '10011': Matrix([\n",
|
||||
"[844.9567],\n",
|
||||
"[1891.157],\n",
|
||||
"[ 99.8117]]), '10026': Matrix([\n",
|
||||
"[1020.0059],\n",
|
||||
"[1913.8703],\n",
|
||||
"[ 100.3059]]), '10027': Matrix([\n",
|
||||
"[1016.9451],\n",
|
||||
"[1866.2914],\n",
|
||||
"[ 100.3251]]), '10043': Matrix([\n",
|
||||
"[1031.2077],\n",
|
||||
"[1822.4739],\n",
|
||||
"[ 100.3035]]), '10044': Matrix([\n",
|
||||
"[ 1025.976],\n",
|
||||
"[1782.4835],\n",
|
||||
"[ 100.5461]]), '10021': Matrix([\n",
|
||||
"[ 992.7607],\n",
|
||||
"[1904.8854],\n",
|
||||
"[ 100.3533]]), '10020': Matrix([\n",
|
||||
"[ 984.6187],\n",
|
||||
"[1903.3601],\n",
|
||||
"[ 100.3423]]), '10024': Matrix([\n",
|
||||
"[ 997.4831],\n",
|
||||
"[1881.7862],\n",
|
||||
"[ 100.3032]]), '10025': Matrix([\n",
|
||||
"[996.3241],\n",
|
||||
"[1866.844],\n",
|
||||
"[100.4102]]), '10022': Matrix([\n",
|
||||
"[990.0679],\n",
|
||||
"[1896.536],\n",
|
||||
"[100.2194]]), '10023': Matrix([\n",
|
||||
"[ 987.3223],\n",
|
||||
"[1889.8762],\n",
|
||||
"[ 100.343]]), '10019': Matrix([\n",
|
||||
"[ 962.6387],\n",
|
||||
"[1902.3565],\n",
|
||||
"[ 99.9772]]), '10033': Matrix([\n",
|
||||
"[ 964.0191],\n",
|
||||
"[1860.8023],\n",
|
||||
"[ 99.8551]]), '10017': Matrix([\n",
|
||||
"[ 931.6761],\n",
|
||||
"[1900.9945],\n",
|
||||
"[ 99.9572]]), '10052': Matrix([\n",
|
||||
"[ 1037.875],\n",
|
||||
"[1757.2999],\n",
|
||||
"[ 100.2737]]), '10042': Matrix([\n",
|
||||
"[1017.3489],\n",
|
||||
"[1803.0742],\n",
|
||||
"[ 100.3441]]), '10053': Matrix([\n",
|
||||
"[1033.3758],\n",
|
||||
"[1723.4258],\n",
|
||||
"[ 100.2774]]), '10037': Matrix([\n",
|
||||
"[ 966.2253],\n",
|
||||
"[1774.2051],\n",
|
||||
"[ 99.9957]]), '10040': Matrix([\n",
|
||||
"[ 990.8832],\n",
|
||||
"[1780.9678],\n",
|
||||
"[ 100.1677]]), '10041': Matrix([\n",
|
||||
"[993.2769],\n",
|
||||
"[1812.031],\n",
|
||||
"[100.4749]]), '10038': Matrix([\n",
|
||||
"[ 958.1899],\n",
|
||||
"[1804.7135],\n",
|
||||
"[ 100.0741]]), '10051': Matrix([\n",
|
||||
"[1008.9811],\n",
|
||||
"[1750.1838],\n",
|
||||
"[ 100.288]]), '10036': Matrix([\n",
|
||||
"[ 948.6403],\n",
|
||||
"[1763.5807],\n",
|
||||
"[ 100.0063]]), '10035': Matrix([\n",
|
||||
"[ 910.1265],\n",
|
||||
"[1768.0099],\n",
|
||||
"[ 100.0781]]), '10039': Matrix([\n",
|
||||
"[ 960.3884],\n",
|
||||
"[1820.0543],\n",
|
||||
"[ 100.0983]]), '10059': Matrix([\n",
|
||||
"[1049.2587],\n",
|
||||
"[1662.5451],\n",
|
||||
"[ 100.0148]]), '10050': Matrix([\n",
|
||||
"[1010.0246],\n",
|
||||
"[1726.2445],\n",
|
||||
"[ 100.1493]]), '10013': Matrix([\n",
|
||||
"[900.9076],\n",
|
||||
"[1902.873],\n",
|
||||
"[ 99.7911]]), '10028': Matrix([\n",
|
||||
"[ 853.9608],\n",
|
||||
"[1815.7417],\n",
|
||||
"[ 99.7793]]), '10012': Matrix([\n",
|
||||
"[ 895.3032],\n",
|
||||
"[1924.1523],\n",
|
||||
"[ 99.8758]]), '10014': Matrix([\n",
|
||||
"[ 913.9706],\n",
|
||||
"[1918.7731],\n",
|
||||
"[ 99.8872]]), '10031': Matrix([\n",
|
||||
"[ 937.1557],\n",
|
||||
"[1855.2805],\n",
|
||||
"[ 99.8479]]), '10015': Matrix([\n",
|
||||
"[ 912.5157],\n",
|
||||
"[1937.6471],\n",
|
||||
"[ 99.9834]]), '10032': Matrix([\n",
|
||||
"[ 954.6732],\n",
|
||||
"[1845.9356],\n",
|
||||
"[ 99.724]]), '10030': Matrix([\n",
|
||||
"[ 908.4749],\n",
|
||||
"[1828.8008],\n",
|
||||
"[ 99.5581]]), '10029': Matrix([\n",
|
||||
"[ 909.3343],\n",
|
||||
"[1814.8767],\n",
|
||||
"[ 99.5486]]), '10034': Matrix([\n",
|
||||
"[ 860.2357],\n",
|
||||
"[1758.9282],\n",
|
||||
"[ 99.737]]), '10045': Matrix([\n",
|
||||
"[867.2324],\n",
|
||||
"[1705.063],\n",
|
||||
"[ 99.7214]]), '10049': Matrix([\n",
|
||||
"[ 985.2561],\n",
|
||||
"[1715.2109],\n",
|
||||
"[ 99.9965]]), '10048': Matrix([\n",
|
||||
"[ 957.3889],\n",
|
||||
"[1716.2949],\n",
|
||||
"[ 99.7212]]), '10047': Matrix([\n",
|
||||
"[ 929.5334],\n",
|
||||
"[1712.6429],\n",
|
||||
"[ 99.6076]]), '10046': Matrix([\n",
|
||||
"[ 910.663],\n",
|
||||
"[1716.0969],\n",
|
||||
"[ 99.5459]]), '10057': Matrix([\n",
|
||||
"[969.6876],\n",
|
||||
"[1655.597],\n",
|
||||
"[ 99.7039]]), '10055': Matrix([\n",
|
||||
"[ 922.4731],\n",
|
||||
"[1647.7452],\n",
|
||||
"[ 99.4658]]), '10054': Matrix([\n",
|
||||
"[ 860.4481],\n",
|
||||
"[1636.6722],\n",
|
||||
"[ 99.7093]]), '10058': Matrix([\n",
|
||||
"[1013.2592],\n",
|
||||
"[1646.6356],\n",
|
||||
"[ 99.8513]]), '10056': Matrix([\n",
|
||||
"[ 939.9763],\n",
|
||||
"[1636.4179],\n",
|
||||
"[ 99.4027]])}\n"
|
||||
]
}
],
"execution_count": 4
},
{
"metadata": {},
"metadata": {
"ExecuteTime": {
"end_time": "2025-12-23T08:39:36.705490Z",
"start_time": "2025-12-23T08:39:36.690491Z"
}
},
"cell_type": "code",
"source": [
"importlib.reload(Datenbank)\n",
@@ -71,60 +295,305 @@
"print(db_zugriff.get_koordinaten(\"naeherung_us\"))"
],
"id": "3989b7b41874c16a",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'10009': Matrix([\n",
|
||||
"[3794767.4719546097],\n",
|
||||
"[ 546740.0869962516],\n",
|
||||
"[ 5080165.952124462]]), '10006': Matrix([\n",
|
||||
"[3794766.3557482935],\n",
|
||||
"[ 546707.6385009313],\n",
|
||||
"[5080169.7334700795]]), '10010': Matrix([\n",
|
||||
"[3794758.6366199246],\n",
|
||||
"[ 546767.6665772106],\n",
|
||||
"[5080169.4644999765]]), '10018': Matrix([\n",
|
||||
"[3794762.2481267513],\n",
|
||||
"[ 546797.6912507551],\n",
|
||||
"[ 5080163.980380166]]), '10008': Matrix([\n",
|
||||
"[3794783.8581],\n",
|
||||
"[ 546746.6347],\n",
|
||||
"[5080152.7404]]), '10005': Matrix([\n",
|
||||
"[3794793.841662743],\n",
|
||||
"[546722.3209011297],\n",
|
||||
"[5080147.930942906]]), '10003': Matrix([\n",
|
||||
"[3794841.051609108],\n",
|
||||
"[546735.1152754558],\n",
|
||||
"[5080111.543399332]]), '10004': Matrix([\n",
|
||||
"[3794803.4594055074],\n",
|
||||
"[ 546714.1406417021],\n",
|
||||
"[ 5080141.382390101]]), '10007': Matrix([\n",
|
||||
"[3794831.046531049],\n",
|
||||
"[546758.7254701178],\n",
|
||||
"[5080116.663324944]]), '10001': Matrix([\n",
|
||||
"[3794901.5252],\n",
|
||||
"[ 546745.559],\n",
|
||||
"[5080065.7672]]), '10002': Matrix([\n",
|
||||
"[3794866.9711],\n",
|
||||
"[ 546729.5958],\n",
|
||||
"[5080092.6364]]), '10016': Matrix([\n",
|
||||
"[3794826.658374741],\n",
|
||||
"[546788.7275390101],\n",
|
||||
"[5080116.868237535]]), '10011': Matrix([\n",
|
||||
"[3794894.922579663],\n",
|
||||
"[546833.1159754294],\n",
|
||||
"[5080061.151341954]]), '10026': Matrix([\n",
|
||||
"[3794753.8595],\n",
|
||||
"[ 546827.4296],\n",
|
||||
"[5080167.0938]]), '10027': Matrix([\n",
|
||||
"[3794757.591261769],\n",
|
||||
"[546874.3314003296],\n",
|
||||
"[5080159.317534195]]), '10043': Matrix([\n",
|
||||
"[3794747.2737986287],\n",
|
||||
"[ 546919.1497828952],\n",
|
||||
"[ 5080162.149716094]]), '10044': Matrix([\n",
|
||||
"[3794752.6696],\n",
|
||||
"[ 546958.3218],\n",
|
||||
"[5080154.2579]]), '10021': Matrix([\n",
|
||||
"[3794776.0295716925],\n",
|
||||
"[ 546833.7406948799],\n",
|
||||
"[ 5080150.012973846]]), '10020': Matrix([\n",
|
||||
"[ 3794782.610580881],\n",
|
||||
"[ 546834.470509102],\n",
|
||||
"[5080145.0361413695]]), '10024': Matrix([\n",
|
||||
"[3794772.816135807],\n",
|
||||
"[ 546857.095708699],\n",
|
||||
"[5080149.834714163]]), '10025': Matrix([\n",
|
||||
"[3794774.2085619094],\n",
|
||||
"[ 546871.8107307912],\n",
|
||||
"[ 5080147.359175114]]), '10022': Matrix([\n",
|
||||
"[3794778.3371531744],\n",
|
||||
"[ 546841.7501872958],\n",
|
||||
"[ 5080147.275074134]]), '10023': Matrix([\n",
|
||||
"[3794780.7952114563],\n",
|
||||
"[ 546848.1012091675],\n",
|
||||
"[ 5080144.924922213]]), '10019': Matrix([\n",
|
||||
"[3794800.0946706245],\n",
|
||||
"[ 546833.3239614451],\n",
|
||||
"[ 5080131.724532257]]), '10033': Matrix([\n",
|
||||
"[3794800.0160474544],\n",
|
||||
"[ 546874.6524563388],\n",
|
||||
"[ 5080127.204744104]]), '10017': Matrix([\n",
|
||||
"[3794825.016154114],\n",
|
||||
"[546831.6998861503],\n",
|
||||
"[5080113.374792286]]), '10052': Matrix([\n",
|
||||
"[3794743.6262089056],\n",
|
||||
"[ 546984.415934838],\n",
|
||||
"[ 5080157.831166813]]), '10042': Matrix([\n",
|
||||
"[ 3794758.957179171],\n",
|
||||
"[ 546937.0599021759],\n",
|
||||
"[5080151.6103044115]]), '10053': Matrix([\n",
|
||||
"[ 3794748.14608301],\n",
|
||||
"[547017.5748381803],\n",
|
||||
"[5080150.930072506]]), '10037': Matrix([\n",
|
||||
"[3794800.5693],\n",
|
||||
"[ 546960.7477],\n",
|
||||
"[ 5080117.665]]), '10040': Matrix([\n",
|
||||
"[3794780.720877459],\n",
|
||||
"[546956.4249913145],\n",
|
||||
"[5080133.161471092]]), '10041': Matrix([\n",
|
||||
"[3794778.153328699],\n",
|
||||
"[ 546925.877928891],\n",
|
||||
"[5080138.722313838]]), '10038': Matrix([\n",
|
||||
"[3794806.3233483736],\n",
|
||||
"[ 546929.7308726012],\n",
|
||||
"[ 5080116.89880491]]), '10051': Matrix([\n",
|
||||
"[3794767.0574626415],\n",
|
||||
"[ 546988.6993708528],\n",
|
||||
"[ 5080139.997874675]]), '10036': Matrix([\n",
|
||||
"[3794815.0546409036],\n",
|
||||
"[ 546969.5966706082],\n",
|
||||
"[ 5080106.064114862]]), '10035': Matrix([\n",
|
||||
"[3794845.948751911],\n",
|
||||
"[ 546961.512678588],\n",
|
||||
"[5080084.087510971]]), '10039': Matrix([\n",
|
||||
"[3794804.1623731344],\n",
|
||||
"[ 546914.7316360716],\n",
|
||||
"[ 5080120.139242563]]), '10059': Matrix([\n",
|
||||
"[3794736.9649],\n",
|
||||
"[ 547079.4678],\n",
|
||||
"[5080152.3224]]), '10050': Matrix([\n",
|
||||
"[3794766.7719544796],\n",
|
||||
"[ 547012.5266236273],\n",
|
||||
"[ 5080137.484970744]]), '10013': Matrix([\n",
|
||||
"[3794849.6087244693],\n",
|
||||
"[ 546826.8685540904],\n",
|
||||
"[ 5080095.43002485]]), '10028': Matrix([\n",
|
||||
"[3794889.7348],\n",
|
||||
"[ 546908.7636],\n",
|
||||
"[5080056.9381]]), '10012': Matrix([\n",
|
||||
"[3794853.6002710722],\n",
|
||||
"[ 546805.2364847381],\n",
|
||||
"[ 5080094.889461209]]), '10014': Matrix([\n",
|
||||
"[3794838.7464],\n",
|
||||
"[ 546812.3658],\n",
|
||||
"[ 5080105.2]]), '10031': Matrix([\n",
|
||||
"[3794821.7594477106],\n",
|
||||
"[ 546877.5480584177],\n",
|
||||
"[ 5080110.746046175]]), '10015': Matrix([\n",
|
||||
"[3794839.4650256806],\n",
|
||||
"[ 546793.5165545414],\n",
|
||||
"[5080106.7712153485]]), '10032': Matrix([\n",
|
||||
"[3794807.848210704],\n",
|
||||
"[546888.4861254627],\n",
|
||||
"[5080119.745908576]]), '10030': Matrix([\n",
|
||||
"[3794845.353156385],\n",
|
||||
"[546901.0274418414],\n",
|
||||
"[5080090.356531718]]), '10029': Matrix([\n",
|
||||
"[3794845.026354165],\n",
|
||||
"[546914.9167077399],\n",
|
||||
"[5080089.099946169]]), '10034': Matrix([\n",
|
||||
"[3794886.104894752],\n",
|
||||
"[546965.6987415539],\n",
|
||||
"[ 5080053.40592357]]), '10045': Matrix([\n",
|
||||
"[3794881.900452307],\n",
|
||||
"[547019.7835874384],\n",
|
||||
"[5080050.715777841]]), '10049': Matrix([\n",
|
||||
"[3794786.8907962884],\n",
|
||||
"[ 547021.0765699627],\n",
|
||||
"[ 5080121.444681106]]), '10048': Matrix([\n",
|
||||
"[3794809.106679632],\n",
|
||||
"[547017.3023106218],\n",
|
||||
"[5080105.014391199]]), '10047': Matrix([\n",
|
||||
"[3794831.5349817923],\n",
|
||||
"[ 547018.2393882351],\n",
|
||||
"[ 5080088.124038595]]), '10046': Matrix([\n",
|
||||
"[3794846.5803718665],\n",
|
||||
"[ 547012.9971156706],\n",
|
||||
"[ 5080077.440420756]]), '10057': Matrix([\n",
|
||||
"[3794800.819370702],\n",
|
||||
"[ 547078.671611169],\n",
|
||||
"[ 5080104.57270624]]), '10055': Matrix([\n",
|
||||
"[3794838.851977278],\n",
|
||||
"[ 547081.903863645],\n",
|
||||
"[5080075.698247853]]), '10054': Matrix([\n",
|
||||
"[3794889.0494],\n",
|
||||
"[ 547086.9874],\n",
|
||||
"[5080038.1528]]), '10058': Matrix([\n",
|
||||
"[3794766.1088143717],\n",
|
||||
"[ 547091.7542871874],\n",
|
||||
"[ 5080129.120881729]]), '10056': Matrix([\n",
|
||||
"[3794825.041003442],\n",
|
||||
"[547094.8115741647],\n",
|
||||
"[5080084.488768324]]), '0645': Matrix([\n",
|
||||
"[3793994.4529],\n",
|
||||
"[ 495758.0093],\n",
|
||||
"[5085958.2047]]), '0648': Matrix([\n",
|
||||
"[3762551.5682],\n",
|
||||
"[ 538424.8576],\n",
|
||||
"[5104809.1503]]), '0656': Matrix([\n",
|
||||
"[3794838.5802],\n",
|
||||
"[ 546995.3112],\n",
|
||||
"[5080116.5503]]), '0995': Matrix([\n",
|
||||
"[3794519.9177],\n",
|
||||
"[ 588539.9138],\n",
|
||||
"[5075743.9332]]), '1675': Matrix([\n",
|
||||
"[3813621.0427],\n",
|
||||
"[ 566004.8947],\n",
|
||||
"[ 5064056.93]]), 'ESTE': Matrix([\n",
|
||||
"[3816914.711],\n",
|
||||
"[ 507636.812],\n",
|
||||
"[5067733.467]]), 'GNA2': Matrix([\n",
|
||||
"[3767530.6335],\n",
|
||||
"[ 597990.0978],\n",
|
||||
"[5094563.5073]])}\n"
|
||||
]
}
],
"execution_count": 5
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-12-23T08:39:37.518112Z",
"start_time": "2025-12-23T08:39:37.508774Z"
}
},
"cell_type": "code",
"source": [
"importlib.reload(Import)\n",
"imp = Import.Import(pfad_datenbank)\n",
"\n",
"pfad_koordinaten_gnss = r\"Daten\\Koordinaten_OL_umliegend_bereinigt.csv\"\n",
"# X, Y, Z of the SAPOS reference stations\n",
"genauigkeit_sapos_referenzstationen = [0.05, 0.04, 0.09]\n",
"\n",
"imp.import_koordinaten_gnss(pfad_koordinaten_gnss, genauigkeit_sapos_referenzstationen)\n"
],
"id": "7b6a359712fe858e",
"outputs": [
{
"data": {
"text/plain": [
"'Import of the coordinates from static GNSS completed.'"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"execution_count": 6
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-12-23T08:50:23.112559Z",
"start_time": "2025-12-23T08:50:23.042100Z"
}
},
"cell_type": "code",
"source": [
"# Determine the datum-defining coordinates\n",
"importlib.reload(Datenbank)\n",
"db_zugriff = Datenbank.Datenbankzugriff(pfad_datenbank)\n",
"\n",
"liste_koordinaten_x = [10026]\n",
"liste_koordinaten_y = [10059]\n",
"liste_koordinaten_z = [10028]\n",
"liste_koordinaten_x_y_z = [10008, 10001]\n",
"\n",
"db_zugriff.set_datumskoordinaten(liste_koordinaten_x, liste_koordinaten_y, liste_koordinaten_z, liste_koordinaten_x_y_z)\n",
"\n",
"# Remove datum-defining coordinates\n",
"liste_koordinaten_x = [10026]\n",
"liste_koordinaten_y = [10059]\n",
"liste_koordinaten_z = [10028]\n",
"liste_koordinaten_x_y_z = [10001]\n",
"\n",
"db_zugriff.set_datumskoordinaten_to_neupunkte(liste_koordinaten_x, liste_koordinaten_y, liste_koordinaten_z, liste_koordinaten_x_y_z)"
],
"id": "5f786757ba89d5d0",
"outputs": [],
"execution_count": null
"execution_count": 17
},
{
"metadata": {},
"cell_type": "code",
"source": [
"# ToDo: As soon as GNSS results are available, compute coordinates in ETRS89 / DREF 91 (2025) from them!\n",
"liste_koordinaten_naeherung_us_alt = {\n",
" 10001: (3794874.98408291, 546741.751930012, 5079995.3838),\n",
" 10002: (3794842.53340714, 546726.907150697, 5080039.8778),\n",
" 10008: (3794757.41294192, 546742.822339098, 5080107.3198),\n",
" 10012: (3794827.11937161, 546801.412652168, 5080028.5852),\n",
" 10026: (3794727.06042449, 546823.571170112, 5080134.2029),\n",
" 10028: (3794862.91900719, 546904.943464041, 5079920.8994),\n",
" 10037: (3794774.14751515, 546955.423068316, 5079960.9426),\n",
" 10044: (3794725.78597473, 546954.557211544, 5080009.9234),\n",
" 10054: (3794852.07416848, 547094.399826613, 5079715.1737),\n",
" 10059: (3794710.34348443, 547075.630380075, 5080119.6491),\n",
"}\n",
"\n",
"liste_koordinaten_naeherung_us_V2 = {\n",
" 10001: (3794874.984, 546741.752, 5080029.990),\n",
" 10002: (3794842.533, 546726.907, 5080071.133),\n",
" 10008: (3794757.413, 546742.822, 5080135.400),\n",
" 10012: (3794827.119, 546801.413, 5080065.404),\n",
" 10026: (3794727.060, 546823.571, 5080179.951),\n",
" 10028: (3794862.919, 546904.943, 5079963.214),\n",
" 10037: (3794774.148, 546955.423, 5080040.520),\n",
" 10044: (3794725.786, 546954.557, 5080084.411),\n",
" 10054: (3794852.074, 547094.400, 5079771.845),\n",
" 10059: (3794710.343, 547075.630, 5080153.653),\n",
"}\n",
"\n",
"liste_koordinaten_naeherung_us = {\n",
" 10001: (3794874.984, 546741.752, 5080029.990),\n",
" 10002: (3794842.533, 546726.907, 5080071.133),\n",
" 10037: (3794774.148, 546955.423, 5080040.520),\n",
" 10044: (3794725.786, 546954.557, 5080084.411),\n",
"}\n",
"#liste_koordinaten_naeherung_us = {\n",
"# 10001: (3794874.984, 546741.752, 5080029.990),\n",
"# 10002: (3794842.533, 546726.907, 5080071.133),\n",
"# 10037: (3794774.148, 546955.423, 5080040.520),\n",
"# 10044: (3794725.786, 546954.557, 5080084.411),\n",
"#}\n",
"\n",
"\n",
"con = sqlite3.connect(pfad_datenbank)\n",
"cursor = con.cursor()\n",
"sql = \"\"\"\n",
"UPDATE Netzpunkte\n",
"SET naeherungx_us = ?, naeherungy_us = ?, naeherungz_us = ?\n",
"WHERE punktnummer = ?\n",
"\"\"\"\n",
"for punktnummer, (x, y, z) in liste_koordinaten_naeherung_us.items():\n",
" cursor.execute(sql, (x, y, z, punktnummer))\n",
"con.commit()\n",
"cursor.close()\n",
"con.close()"
"#con = sqlite3.connect(pfad_datenbank)\n",
"#cursor = con.cursor()\n",
"#sql = \"\"\"\n",
"#UPDATE Netzpunkte\n",
"#SET naeherungx_us = ?, naeherungy_us = ?, naeherungz_us = ?\n",
"#WHERE punktnummer = ?\n",
"#\"\"\"\n",
"#for punktnummer, (x, y, z) in #liste_koordinaten_naeherung_us.items():\n",
"# cursor.execute(sql, (x, y, z, punktnummer))\n",
"#con.commit()\n",
"#cursor.close()\n",
"#con.close()"
],
"id": "f64d9c01318b40f1",
"outputs": [],
@@ -260,14 +729,35 @@
"importlib.reload(Datenbank)\n",
"db_zugriff = Datenbank.Datenbankzugriff(pfad_datenbank)\n",
"\n",
"db_zugriff.get_instrument(\"Tachymeter\")\n",
"db_zugriff.get_instrument_liste(\"Tachymeter\")\n",
"db_zugriff.set_instrument(\"Tachymeter\", \"Trimble S9\")\n",
"db_zugriff.get_instrument(\"Tachymeter\")"
"db_zugriff.get_instrument_liste(\"Tachymeter\")"
],
"id": "e376b4534297016c",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"# Import the a-priori accuracy information\n",
"# Allowed observation types = \"Tachymeter_Richtung\", \"Tachymeter_Strecke\"\n",
"# If Beobachtungsart = \"Tachymeter_Richtung\" --> pass the value in milligon and only Stabw_apriori_konst\n",
"# If Beobachtungsart = \"Tachymeter_Strecke\" --> pass Stabw_apriori_konst in millimetres and Stabw_apriori_streckenprop in ppm\n",
"\n",
"importlib.reload(Datenbank)\n",
"db_zugriff = Datenbank.Datenbankzugriff(pfad_datenbank)\n",
"importlib.reload(Berechnungen)\n",
"\n",
"db_zugriff.set_genauigkeiten(1, \"Tachymeter_Richtung\", 0.15)\n",
"db_zugriff.set_genauigkeiten(1, \"Tachymeter_Strecke\", 0.8, 1)\n",
"db_zugriff.set_genauigkeiten(1, \"Tachymeter_Zenitwinkel\", 0.15)"
],
"id": "97e24245ce3398a2",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
@@ -292,15 +782,177 @@
"importlib.reload(Datenbank)\n",
"db_zugriff = Datenbank.Datenbankzugriff(pfad_datenbank)\n",
"\n",
"# Parameters of the GRS80 ellipsoid (reference ellipsoid of ETRS89 / DREF 91 (2025))\n",
"# ToDo: Find a source with parameters that are as accurate as possible!\n",
"a = 6378137.0 #m\n",
"b = 63567552.314 #m\n",
"\n",
"importlib.reload(Funktionales_Modell)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank, a, b)\n",
"\n",
"#db_zugriff.get_beobachtungen_id_standpunkt_zielpunkt(\"tachymeter_distanz\")\n",
"fm.jacobi_matrix_symbolisch()"
"Jacobimatrix_symbolisch = fm.jacobi_matrix_symbolisch()[0]\n",
"Jacobimatrix_symbolisch_liste_unbekannte = fm.jacobi_matrix_symbolisch()[1]\n",
"Jacobimatrix_symbolisch_liste_beobachtungsvektor = fm.jacobi_matrix_symbolisch()[2]"
],
"id": "d38939f7108e1788",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"importlib.reload(Datenbank)\n",
"db_zugriff = Datenbank.Datenbankzugriff(pfad_datenbank)\n",
"importlib.reload(Funktionales_Modell)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank, a, b)\n",
"\n",
"A_matrix_numerisch_iteration0 = fm.jacobi_matrix_zahlen_iteration_0(Jacobimatrix_symbolisch, \"naeherung_us\", Jacobimatrix_symbolisch_liste_unbekannte, Jacobimatrix_symbolisch_liste_beobachtungsvektor)"
],
"id": "4a0b1790c65d59ee",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"importlib.reload(Funktionales_Modell)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank, a, b)\n",
"\n",
"beobachtungsvektor_numerisch = fm.beobachtungsvektor_numerisch(Jacobimatrix_symbolisch_liste_beobachtungsvektor)"
],
"id": "38f698b6694bebe7",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"importlib.reload(Funktionales_Modell)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank, a, b)\n",
"\n",
"beobachtungsvektor_naeherung_symbolisch = fm.beobachtungsvektor_naeherung_symbolisch(Jacobimatrix_symbolisch_liste_beobachtungsvektor)"
],
"id": "e5cca13bbb6b95c5",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"importlib.reload(Funktionales_Modell)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank, a, b)\n",
"\n",
"beobachtungsvektor_naeherung_numerisch_iteration0 = fm.beobachtungsvektor_naeherung_numerisch_iteration0(Jacobimatrix_symbolisch_liste_beobachtungsvektor, beobachtungsvektor_naeherung_symbolisch)"
],
"id": "eb0452c52e7afa6b",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"# Set up the Qll matrix\n",
"importlib.reload(Stochastisches_Modell)\n",
"stoch_modell = Stochastisches_Modell.StochastischesModell(A_matrix_numerisch_iteration0.rows)\n",
"\n",
"Qll_matrix_symbolisch = stoch_modell.Qll_symbolisch(pfad_datenbank, Jacobimatrix_symbolisch_liste_beobachtungsvektor)\n",
"Qll_matrix_numerisch = stoch_modell.Qll_numerisch(pfad_datenbank, Qll_matrix_symbolisch,Jacobimatrix_symbolisch_liste_beobachtungsvektor)"
],
"id": "40a3df8fe549c81",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": "",
"id": "8e2aa544249c9d29",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": "",
"id": "b479d3a946400ff6",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": "",
"id": "5d47e0771b22eb0b",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"importlib.reload(Funktionales_Modell)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank, a, b)\n",
"\n",
"importlib.reload(Parameterschaetzung)\n",
"importlib.reload(Stochastisches_Modell)\n",
"\n",
"importlib.reload(Netzqualität_Genauigkeit)\n",
"importlib.reload(Export)\n",
"\n",
"\n",
"stoch_modell = Stochastisches_Modell.StochastischesModell(A_matrix_numerisch_iteration0.rows)\n",
"\n",
"dx = Parameterschaetzung.ausgleichung_global(A_matrix_numerisch_iteration0, fm.berechnung_dl(beobachtungsvektor_numerisch, beobachtungsvektor_naeherung_numerisch_iteration0), stoch_modell)[1]"
],
"id": "f53849ee4757d5e8",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"source": [
"# By Fabian\n",
"\n",
"importlib.reload(Funktionales_Modell)\n",
"fm = Funktionales_Modell.FunktionalesModell(pfad_datenbank, a, b)\n",
"importlib.reload(Export)\n",
"importlib.reload(Datenbank)\n",
"\n",
"unbekanntenvektor_symbolisch = (fm.unbekanntenvektor_symbolisch(Jacobimatrix_symbolisch_liste_unbekannte))\n",
"unbekanntenvektor_numerisch_iteration0 = fm.unbekanntenvektor_numerisch(Jacobimatrix_symbolisch_liste_unbekannte, unbekanntenvektor_symbolisch)\n",
"print(unbekanntenvektor_numerisch_iteration0)\n",
"print(\"-----\")\n",
"unbekanntenvektor_numerisch = fm.unbekanntenvektor_numerisch(Jacobimatrix_symbolisch_liste_unbekannte, unbekanntenvektor_symbolisch, dx, unbekanntenvektor_numerisch_iteration0)\n",
"print(unbekanntenvektor_numerisch)"
],
"id": "122dca077d1d267c",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "code",
"outputs": [],
"execution_count": null,
"source": [
"# Datum definition: please specify below the coordinate components that are to define the datum\n",
"\n",
"auswahl = [\n",
" (\"101\",\"X\"), (\"101\",\"Y\"), # point 101 horizontal position only\n",
" (\"205\",\"X\"), (\"205\",\"Y\"), (\"205\",\"Z\"), # point 205 full\n",
" (\"330\",\"Z\") # point 330 height only\n",
"]\n",
"\n",
"aktive_unbekannte_indices = Datumsfestlegung.datumskomponenten(auswahl, liste_punktnummern)"
],
"id": "c37670a07848d977"
}
],
"metadata": {
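The last notebook cell shown feeds `auswahl`, a list of (point number, component) pairs, together with `liste_punktnummern` into `Datumsfestlegung.datumskomponenten` to obtain the indices of the unknowns that take part in the datum definition. That module is not part of this compare, so the following is only a hypothetical sketch of such a mapping, assuming the unknown vector is ordered X, Y, Z per point in the order of `liste_punktnummern`:

```python
# Hypothetical sketch of the (punktnummer, component) -> unknown-index mapping;
# Datumsfestlegung itself is not in this diff and the real ordering of the
# unknown vector may differ.
def datumskomponenten(auswahl, liste_punktnummern):
    komponenten_offset = {"X": 0, "Y": 1, "Z": 2}
    indices = []
    for punktnummer, komponente in auswahl:
        punkt_index = liste_punktnummern.index(str(punktnummer))
        indices.append(3 * punkt_index + komponenten_offset[komponente])
    return sorted(indices)

# Example with three points and the selection used in the notebook cell:
liste_punktnummern = ["101", "205", "330"]
auswahl = [("101", "X"), ("101", "Y"),
           ("205", "X"), ("205", "Y"), ("205", "Z"),
           ("330", "Z")]
print(datumskomponenten(auswahl, liste_punktnummern))  # [0, 1, 3, 4, 5, 8]
```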
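A few cells earlier, the Jacobian `A_matrix_numerisch_iteration0`, the reduced observation vector from `berechnung_dl` and the cofactor matrix `Qll` are handed to `Parameterschaetzung.ausgleichung_global`, whose second return value `dx` is then applied to the approximate unknowns. That module is not shown in this compare; the sketch below only illustrates the standard Gauss-Markov step such a routine typically performs, taking the weight matrix as the inverse of Qll (an assumption) and using sympy matrices as elsewhere in the project:

```python
# Sketch of the standard Gauss-Markov least-squares step; what
# Parameterschaetzung.ausgleichung_global actually returns may differ.
import sympy as sp

def ausgleichung_schritt(A, dl, Qll):
    P = Qll.inv()                   # weight matrix (assumed to be Qll^-1)
    N = A.T * P * A                 # normal-equation matrix
    dx = N.inv() * (A.T * P * dl)   # corrections to the approximate unknowns
    v = A * dx - dl                 # residuals
    return v, dx

# Tiny worked example: two observations of a single unknown with equal weights.
A = sp.Matrix([[1.0], [1.0]])
dl = sp.Matrix([0.02, 0.04])
Qll = sp.eye(2)
v, dx = ausgleichung_schritt(A, dl, Qll)
print(dx)  # 0.03, i.e. the weighted mean of the two values
```

Returning `dx` as the second element would match the `[1]` indexing used in the notebook, but the real return signature is not documented here.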
17
Daten/Koordinaten_OL_umliegend_bereinigt.csv
Normal file
@@ -0,0 +1,17 @@
0645,Referenz,11/24/2025 07:59:44,3793994.4529,495758.0093,5085958.2047,0.0000,0.0000,0.0000
0648,Referenz,11/24/2025 07:59:44,3762551.5682,538424.8576,5104809.1503,0.0000,0.0000,0.0000
0656,Referenz,11/24/2025 07:59:44,3794838.5802,546995.3112,5080116.5503,0.0000,0.0000,0.0000
0995,Referenz,11/24/2025 07:59:44,3794519.9177,588539.9138,5075743.9332,0.0000,0.0000,0.0000
1675,Referenz,11/24/2025 07:59:44,3813621.0427,566004.8947,5064056.9300,0.0000,0.0000,0.0000
ESTE,Referenz,11/24/2025 07:59:44,3816914.7110,507636.8120,5067733.4670,0.0000,0.0000,0.0000
GNA2,Referenz,11/24/2025 07:59:44,3767530.6335,597990.0978,5094563.5073,0.0000,0.0000,0.0000
10054,Gemittelt,11/24/2025 16:26:50,3794889.0494,547086.9874,5080038.1528,0.0030,0.0017,0.0023
10014,Gemittelt,11/24/2025 11:45:48,3794838.7464,546812.3658,5080105.2000,0.0024,0.0010,0.0031
10008,Gemittelt,11/24/2025 16:27:15,3794783.8581,546746.6347,5080152.7404,0.0026,0.0008,0.0031
10059,Gemittelt,11/24/2025 14:56:52,3794736.9649,547079.4678,5080152.3224,0.0034,0.0036,0.0039
10037,Gemittelt,11/24/2025 13:31:07,3794800.5693,546960.7477,5080117.6650,0.0034,0.0018,0.0044
10044,Gemittelt,11/24/2025 16:26:33,3794752.6696,546958.3218,5080154.2579,0.0042,0.0013,0.0054
10026,Gemittelt,11/24/2025 14:56:51,3794753.8595,546827.4296,5080167.0938,0.0048,0.0021,0.0058
10001,Gemittelt,11/24/2025 11:46:05,3794901.5252,546745.5590,5080065.7672,0.0068,0.0042,0.0064
10002,Gemittelt,11/24/2025 13:31:18,3794866.9711,546729.5958,5080092.6364,0.0087,0.0025,0.0117
10028,Gemittelt,11/24/2025 16:26:51,3794889.7348,546908.7636,5080056.9381,0.0105,0.0026,0.0135
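The following is an illustrative sketch (not part of the commits) of how rows of this file could be read in Python; the column meaning (point number, type, timestamp, X, Y, Z and three standard deviations) is inferred from the values shown above, and the comma delimiter is assumed from the way the rows are displayed here.

import csv

def lese_koordinaten(pfad):
    punkte = {}
    with open(pfad, newline="", encoding="utf-8") as f:
        for zeile in csv.reader(f):
            punktnummer, art, zeitstempel = zeile[0], zeile[1], zeile[2]
            x, y, z = (float(w) for w in zeile[3:6])       # geozentrische Koordinaten
            sx, sy, sz = (float(w) for w in zeile[6:9])    # Standardabweichungen
            punkte[punktnummer] = (art, zeitstempel, (x, y, z), (sx, sy, sz))
    return punkte

# Beispiel: lese_koordinaten("Daten/Koordinaten_OL_umliegend_bereinigt.csv")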
242
Datenbank.py
@@ -1,6 +1,9 @@
|
||||
import os
|
||||
import sqlite3
|
||||
import sympy as sp
|
||||
from Berechnungen import Einheitenumrechnung
|
||||
from decimal import Decimal
|
||||
|
||||
|
||||
|
||||
class Datenbank_anlegen:
|
||||
@@ -21,6 +24,12 @@ class Datenbank_anlegen:
|
||||
naeherungx_us NUMERIC(9,3),
|
||||
naeherungy_us NUMERIC(7,3),
|
||||
naeherungz_us NUMERIC(8,3),
|
||||
datumskoordinate_x INTEGER DEFAULT 0,
|
||||
datumskoordinate_y INTEGER DEFAULT 0,
|
||||
datumskoordinate_z INTEGER DEFAULT 0,
|
||||
stabw_vorinfo_x NUMERIC(3, 8),
|
||||
stabw_vorinfo_y NUMERIC(3, 8),
|
||||
stabw_vorinfo_z NUMERIC(3, 8),
|
||||
CONSTRAINT pk_Netzpunkte PRIMARY KEY (punktnummer)
|
||||
);
|
||||
""");
|
||||
@@ -33,8 +42,24 @@ class Datenbank_anlegen:
|
||||
tachymeter_richtung NUMERIC(8, 6),
|
||||
tachymeter_zenitwinkel NUMERIC(8, 6),
|
||||
tachymeter_distanz NUMERIC(8, 4),
|
||||
gnss_bx NUMERIC(7, 4),
|
||||
gnss_by NUMERIC(7, 4),
|
||||
gnss_bz NUMERIC(7, 4),
|
||||
gnss_m0 NUMERIC(1, 8),
|
||||
gnss_q11 NUMERIC(1, 8),
|
||||
gnss_q12 NUMERIC(1, 8),
|
||||
gnss_q13 NUMERIC(1, 8),
|
||||
gnss_q21 NUMERIC(1, 8),
|
||||
gnss_q22 NUMERIC(1, 8),
|
||||
gnss_q23 NUMERIC(1, 8),
|
||||
gnss_q31 NUMERIC(1, 8),
|
||||
gnss_q32 NUMERIC(1, 8),
|
||||
gnss_q33 NUMERIC(1, 8),
|
||||
dateiname TEXT(200),
|
||||
CONSTRAINT pk_Beobachtunen PRIMARY KEY (beobachtungenID)
|
||||
CONSTRAINT pk_Beobachtungen PRIMARY KEY (beobachtungenID),
|
||||
CONSTRAINT fk_Beobachtungen_Netzpunktesp FOREIGN KEY (punktnummer_sp) REFERENCES Netzpunkte(punktnummer),
|
||||
CONSTRAINT fk_Beobachtungen_Netzpunktezp FOREIGN KEY (punktnummer_zp) REFERENCES Netzpunkte(punktnummer),
|
||||
CONSTRAINT fk_Beobachtungen_Instrumente FOREIGN KEY (instrumenteID) REFERENCES Instrumente(instrumenteID)
|
||||
);
|
||||
""");
|
||||
cursor.executescript("""CREATE TABLE Instrumente(
|
||||
@@ -44,6 +69,16 @@ class Datenbank_anlegen:
|
||||
CONSTRAINT pk_Instrumente PRIMARY KEY (instrumenteID)
|
||||
);
|
||||
""")
|
||||
cursor.executescript("""CREATE TABLE Genauigkeiten(
|
||||
genauigkeitenID INTEGER,
|
||||
instrumenteID INTEGER,
|
||||
beobachtungsart TEXT(25),
|
||||
stabw_apriori_konstant NUMERIC(3, 8),
|
||||
stabw_apriori_streckenprop NUMERIC(3, 8),
|
||||
CONSTRAINT pk_Genauigkeiten PRIMARY KEY (genauigkeitenID),
|
||||
CONSTRAINT fk_Genauigkeiten_Instrumente FOREIGN KEY (instrumenteID) REFERENCES Instrumente(instrumenteID)
|
||||
);
|
||||
""")
|
||||
con.commit()
|
||||
cursor.close()
|
||||
con.close()
|
||||
@@ -71,7 +106,14 @@ class Datenbankzugriff:
|
||||
if koordinate[1] is not None and koordinate[2] is not None and koordinate[3] is not None
|
||||
]
|
||||
if ausgabeart == "Dict":
|
||||
return {koordinate[0]: sp.Matrix([float(koordinate[1]), float(koordinate[2]), float(koordinate[3])]) for koordinate in liste_koordinaten}
|
||||
return {
|
||||
koordinate[0]: sp.Matrix([
|
||||
sp.Float(str(koordinate[1])),
|
||||
sp.Float(str(koordinate[2])),
|
||||
sp.Float(str(koordinate[3]))
|
||||
])
|
||||
for koordinate in liste_koordinaten
|
||||
}
|
||||
|
||||
def set_koordinaten(self, dict_koordinaten, koordinatenart):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
@@ -114,7 +156,168 @@ class Datenbankzugriff:
|
||||
cursor.close()
|
||||
con.close()
|
||||
|
||||
def get_instrument(self, typ):
|
||||
def set_genauigkeiten(self, instrumenteID, beobachtungsart, stabw_apriori_konstant =None, stabw_apriori_streckenprop =None):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
|
||||
instrumentenname = cursor.execute("SELECT name FROM Instrumente WHERE instrumenteID = ?",
|
||||
(instrumenteID, )).fetchone()
|
||||
if instrumentenname is None:
|
||||
print(
|
||||
f"Die InstumentenID {instrumenteID} ist in der Datenbank nicht vorhanden. Bitte zuerst das Instrument hinzufügen.")
|
||||
cursor.close()
|
||||
con.close()
|
||||
return
|
||||
instrumentenname = instrumentenname[0]
|
||||
|
||||
if stabw_apriori_konstant is None and stabw_apriori_streckenprop is None:
|
||||
print(
|
||||
"Es wurden keine Genauigkeiten importiert. Bitte stabw_apriori_konstant und / oder stabw_apriori_streckenprop angeben.")
|
||||
cursor.close()
|
||||
con.close()
|
||||
return
|
||||
|
||||
if beobachtungsart == "Tachymeter_Richtung" or beobachtungsart == "Tachymeter_Zenitwinkel" :
|
||||
stabw_apriori_konstant = Einheitenumrechnung.mgon_to_rad_Decimal(stabw_apriori_konstant)
|
||||
|
||||
if beobachtungsart == "Tachymeter_Strecke":
|
||||
stabw_apriori_konstant = Einheitenumrechnung.mm_to_m(stabw_apriori_konstant)
|
||||
|
||||
if isinstance(stabw_apriori_konstant, Decimal):
|
||||
stabw_apriori_konstant = float(stabw_apriori_konstant)
|
||||
|
||||
if isinstance(stabw_apriori_streckenprop, Decimal):
|
||||
stabw_apriori_streckenprop = float(stabw_apriori_streckenprop)
|
||||
|
||||
sql = "SELECT 1 FROM Genauigkeiten WHERE instrumenteID = ? AND beobachtungsart = ?"
|
||||
params = [instrumenteID, beobachtungsart]
|
||||
|
||||
if stabw_apriori_konstant is None:
|
||||
sql += " AND stabw_apriori_konstant IS NULL"
|
||||
else:
|
||||
sql += " AND stabw_apriori_konstant = ?"
|
||||
params.append(stabw_apriori_konstant)
|
||||
|
||||
if stabw_apriori_streckenprop is None:
|
||||
sql += " AND stabw_apriori_streckenprop IS NULL"
|
||||
else:
|
||||
sql += " AND stabw_apriori_streckenprop = ?"
|
||||
params.append(stabw_apriori_streckenprop)
|
||||
|
||||
liste_genauigkeiten = cursor.execute(sql, tuple(params)).fetchall()
|
||||
|
||||
if liste_genauigkeiten == []:
|
||||
if stabw_apriori_konstant is not None and stabw_apriori_streckenprop is not None:
|
||||
cursor.execute(
|
||||
"INSERT INTO Genauigkeiten (instrumenteID, beobachtungsart, stabw_apriori_konstant, stabw_apriori_streckenprop) VALUES (?, ?, ?, ?)",
|
||||
(instrumenteID, beobachtungsart, stabw_apriori_konstant, stabw_apriori_streckenprop)
|
||||
)
|
||||
print(
|
||||
f"Die Genauigkeitsangabe für die Beobachtungsart {beobachtungsart} des Instrumentes {instrumentenname} wurde erfolgreich hinzugefügt.")
|
||||
|
||||
elif stabw_apriori_konstant is None and stabw_apriori_streckenprop is not None:
|
||||
cursor.execute(
|
||||
"INSERT INTO Genauigkeiten (instrumenteID, beobachtungsart, stabw_apriori_streckenprop) VALUES (?, ?, ?)",
|
||||
(instrumenteID, beobachtungsart, stabw_apriori_streckenprop)
|
||||
)
|
||||
print(
|
||||
f"Die Genauigkeitsangabe für die Beobachtungsart {beobachtungsart} des Instrumentes {instrumentenname} wurde erfolgreich hinzugefügt.")
|
||||
|
||||
elif stabw_apriori_streckenprop is None and stabw_apriori_konstant is not None:
|
||||
cursor.execute(
|
||||
"INSERT INTO Genauigkeiten (instrumenteID, beobachtungsart, stabw_apriori_konstant) VALUES (?, ?, ?)",
|
||||
(instrumenteID, beobachtungsart, stabw_apriori_konstant)
|
||||
)
|
||||
print(
|
||||
f"Die Genauigkeitsangabe für die Beobachtungsart {beobachtungsart} des Instrumentes {instrumentenname} wurde erfolgreich hinzugefügt.")
|
||||
else:
|
||||
print("Die Genauigkeitsangabe ist bereits in der Datenbank vorhanden.")
|
||||
|
||||
con.commit()
|
||||
cursor.close()
|
||||
con.close()
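# Illustrative usage sketch (not part of the diff): registering a-priori standard deviations
# for an instrument. Assumes a database created with Datenbank_anlegen and an instrument with
# ID 1 already present; the observation-type strings follow the literals checked above, the
# numeric values and the use of Decimal inputs are assumptions for demonstration only.
from decimal import Decimal
from Datenbank import Datenbankzugriff

db = Datenbankzugriff("Campusnetz.db")
db.set_genauigkeiten(1, "Tachymeter_Richtung", stabw_apriori_konstant=Decimal("0.3"))     # 0.3 mgon, converted to rad above
db.set_genauigkeiten(1, "Tachymeter_Zenitwinkel", stabw_apriori_konstant=Decimal("0.3"))  # 0.3 mgon
db.set_genauigkeiten(1, "Tachymeter_Strecke",
                     stabw_apriori_konstant=Decimal("1.0"),                               # 1 mm, converted to m above
                     stabw_apriori_streckenprop=Decimal("0.000001"))                      # 1 ppm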
|
||||
|
||||
def set_datumskoordinaten(self, liste_datumskoordinaten_x, liste_datumskoordinaten_y, liste_datumskoordinaten_z, liste_datumskoordinaten_x_y_z):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
|
||||
liste_stabw_vorinfo_x = [str(row[0]).strip() for row in cursor.execute(
|
||||
"SELECT punktnummer FROM Netzpunkte WHERE stabw_vorinfo_x IS NOT NULL").fetchall()]
|
||||
liste_stabw_vorinfo_y = [str(row[0]).strip() for row in cursor.execute(
|
||||
"SELECT punktnummer FROM Netzpunkte WHERE stabw_vorinfo_y IS NOT NULL").fetchall()]
|
||||
liste_stabw_vorinfo_z = [str(row[0]).strip() for row in cursor.execute(
|
||||
"SELECT punktnummer FROM Netzpunkte WHERE stabw_vorinfo_z IS NOT NULL").fetchall()]
|
||||
liste_stabw_vorinfo_x_y_z = [str(row[0]).strip() for row in cursor.execute(
|
||||
"SELECT punktnummer FROM Netzpunkte WHERE stabw_vorinfo_x IS NOT NULL AND stabw_vorinfo_y IS NOT NULL AND stabw_vorinfo_z IS NOT NULL").fetchall()]
|
||||
|
||||
if liste_datumskoordinaten_x != []:
|
||||
for punktnummer in liste_datumskoordinaten_x:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
if punktnummer in liste_stabw_vorinfo_x:
|
||||
cursor.execute(f"UPDATE Netzpunkte SET datumskoordinate_x = 1 WHERE punktnummer = ? AND stabw_vorinfo_x IS NOT NULL", (str(punktnummer),))
|
||||
else:
|
||||
print(f"Die X-Koordinate des Punktes {punktnummer} wurde nicht in eine Datumskoordinate geändert, weil keine Vorinformationen zur Standardabweichung der X-Koordinate des Punktes vorliegen. Diese bitte zuerst erfassen und Datumsdefinition wiederholen.")
|
||||
if liste_datumskoordinaten_y != []:
|
||||
for punktnummer in liste_datumskoordinaten_y:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
if punktnummer in liste_stabw_vorinfo_y:
|
||||
cursor.execute(f"UPDATE Netzpunkte SET datumskoordinate_y = 1 WHERE punktnummer = ? AND stabw_vorinfo_y IS NOT NULL", (str(punktnummer),))
|
||||
else:
|
||||
print(f"Die Y-Koordinate des Punktes {punktnummer} wurde nicht in eine Datumskoordinate geändert, weil keine Vorinformationen zur Standardabweichung der Y-Koordinate des Punktes vorliegen. Diese bitte zuerst erfassen und Datumsdefinition wiederholen.")
|
||||
|
||||
if liste_datumskoordinaten_z != []:
|
||||
for punktnummer in liste_datumskoordinaten_z:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
if punktnummer in liste_stabw_vorinfo_z:
|
||||
cursor.execute(f"UPDATE Netzpunkte SET datumskoordinate_z = 1 WHERE punktnummer = ? AND stabw_vorinfo_z IS NOT NULL", (str(punktnummer),))
|
||||
else:
|
||||
print(f"Die Z-Koordinate des Punktes {punktnummer} wurde nicht in eine Datumskoordinate geändert, weil keine Vorinformationen zur Standardabweichung der Z-Koordinate des Punktes vorliegen. Diese bitte zuerst erfassen und Datumsdefinition wiederholen.")
|
||||
|
||||
if liste_datumskoordinaten_x_y_z != []:
|
||||
for punktnummer in liste_datumskoordinaten_x_y_z:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
if punktnummer in liste_stabw_vorinfo_x_y_z:
|
||||
cursor.execute(f"UPDATE Netzpunkte SET datumskoordinate_x = 1, datumskoordinate_y = 1, datumskoordinate_z = 1 WHERE punktnummer = ? AND stabw_vorinfo_x IS NOT NULL AND stabw_vorinfo_y IS NOT NULL AND stabw_vorinfo_z IS NOT NULL", (str(punktnummer),))
|
||||
else:
|
||||
print(f"Der Punkt {punktnummer} wurde nicht in einen Datumspunkt geändert, weil nicht alle Vorinformationen zur Standardabweichung der Koordinaten des Punktes vorliegen. Diese bitte zuerst erfassen und Datumsdefinition wiederholen.")
|
||||
|
||||
con.commit()
|
||||
cursor.close()
|
||||
con.close()
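# Illustrative usage sketch (not part of the diff): flag the X and Y components of point 0645
# and all components of 0648 as datum coordinates. The point numbers come from the CSV file
# above; as implemented here, a component is only flagged if its stabw_vorinfo_* value exists.
from Datenbank import Datenbankzugriff

db = Datenbankzugriff("Campusnetz.db")
db.set_datumskoordinaten(
    liste_datumskoordinaten_x=["0645"],
    liste_datumskoordinaten_y=["0645"],
    liste_datumskoordinaten_z=[],
    liste_datumskoordinaten_x_y_z=["0648"],
)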
|
||||
|
||||
def set_datumskoordinaten_to_neupunkte(self, liste_datumskoordinaten_x, liste_datumskoordinaten_y, liste_datumskoordinaten_z, liste_datumskoordinaten_x_y_z):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
if liste_datumskoordinaten_x != []:
|
||||
for punktnummer in liste_datumskoordinaten_x:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
cursor.execute(
|
||||
f"UPDATE Netzpunkte SET datumskoordinate_x = 0 WHERE punktnummer = ?",
|
||||
(str(punktnummer),))
|
||||
if liste_datumskoordinaten_y != []:
|
||||
for punktnummer in liste_datumskoordinaten_y:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
cursor.execute(
|
||||
f"UPDATE Netzpunkte SET datumskoordinate_y = 0 WHERE punktnummer = ?",
|
||||
(str(punktnummer),))
|
||||
if liste_datumskoordinaten_z != []:
|
||||
for punktnummer in liste_datumskoordinaten_z:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
cursor.execute(
|
||||
f"UPDATE Netzpunkte SET datumskoordinate_z = 0 WHERE punktnummer = ?",
|
||||
(str(punktnummer),))
|
||||
if liste_datumskoordinaten_x_y_z != []:
|
||||
for punktnummer in liste_datumskoordinaten_x_y_z:
|
||||
punktnummer = str(punktnummer).strip()
|
||||
cursor.execute(
|
||||
f"UPDATE Netzpunkte SET datumskoordinate_x = 0, datumskoordinate_y = 0, datumskoordinate_z = 0 WHERE punktnummer = ?",
|
||||
(str(punktnummer),))
|
||||
|
||||
con.commit()
|
||||
cursor.close()
|
||||
con.close()
|
||||
|
||||
def get_instrument_liste(self, typ):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
liste_instrumente = cursor.execute("SELECT * FROM Instrumente WHERE typ = ?", (typ,)).fetchall()
|
||||
@@ -125,10 +328,41 @@ class Datenbankzugriff:
|
||||
liste_instrumente = f"Kein Instrument vom Typ {typ} gefunden. Folgende Typen stehen aktuell zur Auswahl: {liste_typen}"
|
||||
return liste_instrumente
|
||||
|
||||
def get_genauigkeiten_dict(self):
|
||||
dict = {}
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
liste_genauigkeiten = cursor.execute("SELECT * FROM Genauigkeiten").fetchall()
|
||||
for genauigkeit in liste_genauigkeiten:
|
||||
dict[genauigkeit[0]] = genauigkeit[1:]
|
||||
cursor.close()
|
||||
con.close()
|
||||
return dict
|
||||
|
||||
def get_instrumenteID_beobachtungenID_dict(self):
|
||||
dict = {}
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
liste_beobachtungen_instrumente = cursor.execute("SELECT beobachtungenID, instrumenteID FROM Beobachtungen").fetchall()
|
||||
for i in liste_beobachtungen_instrumente:
|
||||
dict[i[0]] = i[1]
|
||||
cursor.close()
|
||||
con.close()
|
||||
return dict
|
||||
|
||||
def get_beobachtungen_id_beobachtungsgruppe_standpunkt_zielpunkt(self, beobachtungsart):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
liste_beobachtungen = cursor.execute(f"SELECT beobachtungenID, beobachtungsgruppeID, punktnummer_sp, punktnummer_zp FROM Beobachtungen WHERE {beobachtungsart} IS NOT NULL").fetchall()
|
||||
cursor.close()
|
||||
con.close()
|
||||
return liste_beobachtungen
|
||||
def get_beobachtungen_from_beobachtungenid(self):
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
liste_beobachtungen = cursor.execute(f"SELECT punktnummer_sp, punktnummer_zp, beobachtungenID, beobachtungsgruppeID, tachymeter_richtung, tachymeter_zenitwinkel, tachymeter_distanz FROM Beobachtungen").fetchall()
|
||||
cursor.close()
|
||||
con.close()
|
||||
return liste_beobachtungen
|
||||
|
||||
|
||||
154
Datumsfestlegung.py
Normal file
@@ -0,0 +1,154 @@
|
||||
import sympy as sp
|
||||
from typing import Iterable, List, Sequence, Tuple, Optional
|
||||
|
||||
|
||||
class Datumsfestlegung:
|
||||
|
||||
@staticmethod
|
||||
def datumskomponenten(
|
||||
auswahl: Iterable[Tuple[str, str]],
|
||||
liste_punktnummern: Sequence[str],
|
||||
*,
|
||||
layout: str = "XYZ"
|
||||
) -> List[int]:
|
||||
punkt2pos = {str(p): i for i, p in enumerate(liste_punktnummern)}
|
||||
|
||||
layout = layout.upper()
|
||||
if layout != "XYZ":
|
||||
raise ValueError("Nur layout='XYZ' unterstützt (wie bei euch).")
|
||||
comp2off = {"X": 0, "Y": 1, "Z": 2}
|
||||
|
||||
aktive: List[int] = []
|
||||
for pt, comp in auswahl:
|
||||
spt = str(pt)
|
||||
c = comp.upper()
|
||||
if spt not in punkt2pos:
|
||||
raise KeyError(f"Punkt '{pt}' nicht in liste_punktnummern.")
|
||||
if c not in comp2off:
|
||||
raise ValueError(f"Komponente '{comp}' ungültig. Nur X,Y,Z.")
|
||||
p = punkt2pos[spt]
|
||||
aktive.append(3 * p + comp2off[c])
|
||||
|
||||
# Duplikate entfernen
|
||||
out, seen = [], set()
|
||||
for i in aktive:
|
||||
if i not in seen:
|
||||
seen.add(i)
|
||||
out.append(i)
|
||||
return out
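# Worked example (illustration only): with the X,Y,Z-per-point layout used above the index of
# a component is 3 * position_of_point + {X: 0, Y: 1, Z: 2}. The point numbers below are the
# ones from the notebook cell, not from a real data set.
liste_punktnummern = ["101", "205", "330"]
auswahl = [("101", "X"), ("101", "Y"),
           ("205", "X"), ("205", "Y"), ("205", "Z"),
           ("330", "Z")]
punkt2pos = {p: i for i, p in enumerate(liste_punktnummern)}
comp2off = {"X": 0, "Y": 1, "Z": 2}
print([3 * punkt2pos[pt] + comp2off[c] for pt, c in auswahl])  # [0, 1, 3, 4, 5, 8]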
|
||||
|
||||
@staticmethod
|
||||
def auswahlmatrix_E(u: int, aktive_unbekannte_indices: Iterable[int]) -> sp.Matrix:
|
||||
E = sp.zeros(u, u)
|
||||
for idx in aktive_unbekannte_indices:
|
||||
i = int(idx)
|
||||
if not (0 <= i < u):
|
||||
raise IndexError(f"Aktiver Index {i} außerhalb [0,{u-1}]")
|
||||
E[i, i] = 1
|
||||
return E
|
||||
|
||||
@staticmethod
|
||||
def raenderungsmatrix_G(
|
||||
x0: sp.Matrix,
|
||||
liste_punktnummern: Sequence[str],
|
||||
*,
|
||||
mit_massstab: bool = True,
|
||||
layout: str = "XYZ",
|
||||
) -> sp.Matrix:
|
||||
if x0.cols != 1:
|
||||
raise ValueError("x0 muss Spaltenvektor sein.")
|
||||
layout = layout.upper()
|
||||
if layout != "XYZ":
|
||||
raise ValueError("Nur layout='XYZ' unterstützt (wie bei euch).")
|
||||
|
||||
nP = len(liste_punktnummern)
|
||||
u = x0.rows
|
||||
d = 7 if mit_massstab else 6
|
||||
G = sp.zeros(u, d)
|
||||
|
||||
for p in range(nP):
|
||||
ix, iy, iz = 3*p, 3*p+1, 3*p+2
|
||||
xi, yi, zi = x0[ix, 0], x0[iy, 0], x0[iz, 0]
|
||||
|
||||
# Translationen
|
||||
G[ix, 0] = 1
|
||||
G[iy, 1] = 1
|
||||
G[iz, 2] = 1
|
||||
|
||||
# Rotationen
|
||||
G[iy, 3] = -zi; G[iz, 3] = yi # Rx
|
||||
G[ix, 4] = zi; G[iz, 4] = -xi # Ry
|
||||
G[ix, 5] = -yi; G[iy, 5] = xi # Rz
|
||||
|
||||
# Maßstab
|
||||
if mit_massstab:
|
||||
G[ix, 6] = xi
|
||||
G[iy, 6] = yi
|
||||
G[iz, 6] = zi
|
||||
|
||||
return G
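# Illustrative check (not part of the diff): for a slope distance, the Jacobian row multiplied
# with G must vanish on the translation and rotation columns, while the scale column reproduces
# the distance itself. The two points and their coordinates below are invented.
import sympy as sp
from Datumsfestlegung import Datumsfestlegung

liste_punktnummern = ["P1", "P2"]
x0 = sp.Matrix([100, 200, 300, 110, 220, 330])
G = Datumsfestlegung.raenderungsmatrix_G(x0, liste_punktnummern, mit_massstab=True)

X1, Y1, Z1, X2, Y2, Z2 = sp.symbols("X1 Y1 Z1 X2 Y2 Z2")
s = sp.sqrt((X2 - X1)**2 + (Y2 - Y1)**2 + (Z2 - Z1)**2)
a = sp.Matrix([[sp.diff(s, v) for v in (X1, Y1, Z1, X2, Y2, Z2)]])
werte = dict(zip((X1, Y1, Z1, X2, Y2, Z2), list(x0)))
print(sp.simplify(a.subs(werte) * G))  # [0, 0, 0, 0, 0, 0, s] mit s = sqrt(1400)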
|
||||
|
||||
@staticmethod
|
||||
def berechne_dx_geraendert(N: sp.Matrix, n: sp.Matrix, Gi: sp.Matrix) -> sp.Matrix:
|
||||
if N.rows != N.cols:
|
||||
raise ValueError("N muss quadratisch sein.")
|
||||
if n.cols != 1:
|
||||
raise ValueError("n muss Spaltenvektor sein.")
|
||||
if Gi.rows != N.rows:
|
||||
raise ValueError("Gi hat falsche Zeilenzahl.")
|
||||
|
||||
u = N.rows
|
||||
d = Gi.cols
|
||||
K = N.row_join(Gi)
|
||||
K = K.col_join(Gi.T.row_join(sp.zeros(d, d)))
|
||||
rhs = n.col_join(sp.zeros(d, 1))
|
||||
sol = K.LUsolve(rhs)
|
||||
return sol[:u, :]
|
||||
|
||||
@staticmethod
|
||||
def weiches_datum(
|
||||
A: sp.Matrix,
|
||||
dl: sp.Matrix,
|
||||
Q_ll: sp.Matrix,
|
||||
x0: sp.Matrix,
|
||||
anschluss_indices: Sequence[int],
|
||||
anschluss_werte: sp.Matrix,
|
||||
Sigma_AA: Optional[sp.Matrix] = None,
|
||||
) -> Tuple[sp.Matrix, sp.Matrix, sp.Matrix]:
|
||||
if dl.cols != 1 or x0.cols != 1:
|
||||
raise ValueError("dl und x0 müssen Spaltenvektoren sein.")
|
||||
if A.rows != dl.rows:
|
||||
raise ValueError("A.rows muss dl.rows entsprechen.")
|
||||
if A.cols != x0.rows:
|
||||
raise ValueError("A.cols muss x0.rows entsprechen.")
|
||||
if Q_ll.rows != Q_ll.cols or Q_ll.rows != A.rows:
|
||||
raise ValueError("Q_ll muss (n×n) sein und zu A.rows passen.")
|
||||
|
||||
u = A.cols
|
||||
idx = [int(i) for i in anschluss_indices]
|
||||
m = len(idx)
|
||||
|
||||
if anschluss_werte.cols != 1 or anschluss_werte.rows != m:
|
||||
raise ValueError("anschluss_werte muss (m×1) sein.")
|
||||
if Sigma_AA is None:
|
||||
Sigma_AA = sp.eye(m)
|
||||
if Sigma_AA.rows != m or Sigma_AA.cols != m:
|
||||
raise ValueError("Sigma_AA muss (m×m) sein.")
|
||||
|
||||
A_A = sp.zeros(m, u)
|
||||
for r, j in enumerate(idx):
|
||||
if not (0 <= j < u):
|
||||
raise IndexError(f"Anschluss-Index {j} außerhalb [0,{u-1}]")
|
||||
A_A[r, j] = 1
|
||||
|
||||
x0_A = sp.Matrix([[x0[j, 0]] for j in idx])
|
||||
dl_A = anschluss_werte - x0_A
|
||||
|
||||
A_ext = A.col_join(A_A)
|
||||
dl_ext = dl.col_join(dl_A)
|
||||
|
||||
Q_ext = sp.zeros(Q_ll.rows + m, Q_ll.cols + m)
|
||||
Q_ext[:Q_ll.rows, :Q_ll.cols] = Q_ll
|
||||
Q_ext[Q_ll.rows:, Q_ll.cols:] = Sigma_AA
|
||||
|
||||
return A_ext, dl_ext, Q_ext
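# Hedged sketch (not part of the diff): extend a toy 2-observation / 2-unknown system with one
# soft-datum pseudo-observation on unknown 0. Every number here is invented.
import sympy as sp
from Datumsfestlegung import Datumsfestlegung

A = sp.Matrix([[1, 0], [0, 1]])
dl = sp.Matrix([0.010, -0.020])
Q_ll = sp.eye(2)
x0 = sp.Matrix([100.000, 200.000])

A_ext, dl_ext, Q_ext = Datumsfestlegung.weiches_datum(
    A, dl, Q_ll, x0,
    anschluss_indices=[0],
    anschluss_werte=sp.Matrix([100.005]),   # prior value for unknown 0
    Sigma_AA=sp.Matrix([[0.0001]]),         # its cofactor, illustrative value
)
print(A_ext.shape, dl_ext.shape, Q_ext.shape)  # (3, 2) (3, 1) (3, 3)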
|
||||
@@ -1,18 +0,0 @@
|
||||
import sympy as sp
|
||||
|
||||
class Einheitenumrechnung:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def mas_to_rad(mas):
|
||||
umrechnungsfaktor = 1 / 1000 * 1 / 3600 * sp.pi / 180
|
||||
grad = mas * umrechnungsfaktor
|
||||
return grad
|
||||
|
||||
def mm_to_m(mm):
|
||||
m = mm / 1000
|
||||
return m
|
||||
|
||||
def ppb(ppb):
|
||||
ppb *= 10 ** (-9)
|
||||
return ppb
|
||||
60
Export.py
Normal file
@@ -0,0 +1,60 @@
|
||||
import csv
|
||||
|
||||
class Export:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def matrix_to_csv(dateiname, liste_spaltenbeschriftung, liste_zeilenbeschriftung, Matrix, beschriftung_kopfzeile = ""):
|
||||
with open(dateiname, "w", newline="", encoding="utf-8") as csvfile:
|
||||
writer = csv.writer(csvfile, delimiter=";")
|
||||
|
||||
kopfzeile = [beschriftung_kopfzeile]
|
||||
for spaltenbeschriftung in liste_spaltenbeschriftung:
|
||||
kopfzeile.append(str(spaltenbeschriftung))
|
||||
writer.writerow(kopfzeile)
|
||||
|
||||
for zeilenbeschriftung, zeile in zip(liste_zeilenbeschriftung, Matrix.tolist()):
|
||||
zeile_als_text = [zeilenbeschriftung]
|
||||
for eintrag in zeile:
|
||||
try:
|
||||
eintrag_text = str(eintrag).replace(".", ",")
|
||||
except Exception:
|
||||
eintrag_text = str(eintrag)
|
||||
zeile_als_text.append(eintrag_text)
|
||||
writer.writerow(zeile_als_text)
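# Illustrative usage sketch (not part of the diff): write a 2x2 sympy matrix with labelled rows
# and columns; the values and the file name are made up.
import sympy as sp
from Export import Export

M = sp.Matrix([[1.5, 2.0], [3.25, 4.0]])
Export.matrix_to_csv("Beispiel_Matrix.csv", ["u1", "u2"], ["obs1", "obs2"], M, "Beobachtung")
# -> semicolon-separated file, decimal points written as commas, header row "Beobachtung;u1;u2"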
|
||||
|
||||
@staticmethod
|
||||
def ausgleichung_to_datei(dateiname, dict_ausgleichung):
|
||||
with open(dateiname, "w", newline="", encoding="utf-8") as csvfile:
|
||||
writer = csv.writer(csvfile, delimiter=";")
|
||||
|
||||
writer.writerow(["Parameter", "Wert"])
|
||||
|
||||
for key, value in dict_ausgleichung.items():
|
||||
|
||||
if hasattr(value, "tolist"):
|
||||
rows = value.rows
|
||||
cols = value.cols
|
||||
|
||||
writer.writerow([key, f"Matrix {rows}x{cols}"])
|
||||
|
||||
for i, zeile in enumerate(value.tolist()):
|
||||
zeile_als_text = [f"{key}_zeile_{i+1}"]
|
||||
for eintrag in zeile:
|
||||
try:
|
||||
eintrag_float = float(eintrag)
|
||||
eintrag_text = f"{eintrag_float}".replace(".", ",")
|
||||
except Exception:
|
||||
eintrag_text = str(eintrag)
|
||||
zeile_als_text.append(eintrag_text)
|
||||
writer.writerow(zeile_als_text)
|
||||
|
||||
else:
|
||||
try:
|
||||
value_float = float(value)
|
||||
value_text = f"{value_float}".replace(".", ",")
|
||||
except Exception:
|
||||
value_text = str(value)
|
||||
|
||||
writer.writerow([key, value_text])
|
||||
@@ -1,30 +1,26 @@
|
||||
from Datenbank import *
|
||||
import sympy as sp
|
||||
import csv
|
||||
from Export import Export
|
||||
from Berechnungen import Berechnungen
|
||||
|
||||
|
||||
class FunktionalesModell:
|
||||
def __init__(self, pfad_datenbank):
|
||||
def __init__(self, pfad_datenbank, a, b):
|
||||
self.pfad_datenbank = pfad_datenbank
|
||||
self.berechnungen = Berechnungen(a, b)
|
||||
self.substitutionen_dict = self.dict_substitutionen_uebergeordnetes_system()
|
||||
self.dict_punkt_symbole = {}
|
||||
|
||||
def jacobi_matrix_symbolisch(self):
|
||||
liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung", "tachymeter_zenitwinkel"]
|
||||
#liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung"]
|
||||
db_zugriff = Datenbankzugriff(self.pfad_datenbank)
|
||||
|
||||
liste_beobachtungen_rohdaten = []
|
||||
liste_punktnummern =[]
|
||||
|
||||
liste_beobachtungen_jacobian = []
|
||||
liste_orientierungsunbekannte = []
|
||||
|
||||
liste_beobachtungsgleichungen_jacobian = []
|
||||
liste_beobachtungsgleichungen_abgeleitet = []
|
||||
|
||||
liste_beobachtungen_abgeleitet = []
|
||||
liste_zeilenbeschriftungen_jacobian = []
|
||||
|
||||
liste_zeilenbeschriftungen_abgeleitet = []
|
||||
|
||||
for beobachtungsart in liste_beobachtungsarten:
|
||||
liste_id_standpunkt_zielpunkt = db_zugriff.get_beobachtungen_id_beobachtungsgruppe_standpunkt_zielpunkt(beobachtungsart)
|
||||
|
||||
@@ -44,12 +40,262 @@ class FunktionalesModell:
|
||||
if liste_beobachtungen_rohdaten == []:
|
||||
return None
|
||||
|
||||
dict_punkt_symbole = {}
|
||||
#dict_punkt_symbole = {}
|
||||
liste_unbekannte = []
|
||||
|
||||
for punkt in liste_punktnummern:
|
||||
X, Y, Z = sp.symbols(f"X{punkt} Y{punkt} Z{punkt}")
|
||||
dict_punkt_symbole[punkt] = (X, Y, Z)
|
||||
self.dict_punkt_symbole[punkt] = (X, Y, Z)
|
||||
liste_unbekannte.append(X)
|
||||
liste_unbekannte.append(Y)
|
||||
liste_unbekannte.append(Z)
|
||||
|
||||
dict_orientierung_symbole = {}
|
||||
for orientierungsunbekannte in liste_orientierungsunbekannte:
|
||||
O = sp.symbols(f"O{orientierungsunbekannte}")
|
||||
dict_orientierung_symbole[orientierungsunbekannte] = O
|
||||
liste_unbekannte.append(O)
|
||||
|
||||
liste_beobachtungsgleichungen_distanz =[]
|
||||
liste_zeilenbeschriftungen_distanz = []
|
||||
|
||||
liste_A_richtung_zeilen = []
|
||||
liste_zeilenbeschriftungen_richtung = []
|
||||
liste_A_zenitwinkel_zeilen = []
|
||||
liste_zeilenbeschriftungen_zenitwinkel = []
|
||||
|
||||
|
||||
for beobachtungsart, beobachtungenID, beobachtungsgruppeID, standpunkt, zielpunkt in liste_beobachtungen_rohdaten:
|
||||
X_sp, Y_sp, Z_sp = self.dict_punkt_symbole[standpunkt]
|
||||
X_zp, Y_zp, Z_zp = self.dict_punkt_symbole[zielpunkt]
|
||||
B_sp, L_sp = sp.symbols(f"B{standpunkt} L{standpunkt}")
|
||||
|
||||
# Symbole für die Beobachtungswerte (werden später numerisch substituiert)
|
||||
alpha = sp.symbols(f"{beobachtungenID}_R_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
zw = sp.symbols(f"{beobachtungenID}_ZW_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
s = sp.symbols(f"{beobachtungenID}_SD_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
|
||||
if beobachtungsart == "tachymeter_distanz":
|
||||
beobachtungsgleichung = sp.sqrt((X_zp - X_sp) ** 2 + (Y_zp - Y_sp) ** 2 + (Z_zp - Z_sp) ** 2)
|
||||
liste_beobachtungsgleichungen_distanz.append(beobachtungsgleichung)
|
||||
liste_zeilenbeschriftungen_distanz.append(
|
||||
f"{beobachtungenID}_SD_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
|
||||
|
||||
|
||||
elif beobachtungsart == "tachymeter_richtung":
|
||||
|
||||
# Richtung nach Otepka: r = a12 + O
|
||||
|
||||
# dB und dL werden bewusst weggelassen
|
||||
|
||||
dX = X_zp - X_sp
|
||||
|
||||
dY = Y_zp - Y_sp
|
||||
|
||||
dZ = Z_zp - Z_sp
|
||||
|
||||
# Lokales System: x_loc = Nord, y_loc = Ost
|
||||
|
||||
x_loc = (-sp.sin(B_sp) * sp.cos(L_sp)) * dX + (-sp.sin(B_sp) * sp.sin(L_sp)) * dY + (sp.cos(B_sp)) * dZ
|
||||
|
||||
y_loc = (-sp.sin(L_sp)) * dX + (sp.cos(L_sp)) * dY
|
||||
|
||||
# Otepka-Nenner: s12 * sin(zw12) = sqrt(x_loc^2 + y_loc^2)
|
||||
|
||||
s_horiz = sp.sqrt(x_loc ** 2 + y_loc ** 2)
|
||||
|
||||
# sin(t12), cos(t12) im Horizontsystem (t12 = Azimut, rechtsdrehend, Bezug Nord)
|
||||
|
||||
sin_t = y_loc / s_horiz
|
||||
|
||||
cos_t = x_loc / s_horiz
|
||||
|
||||
# Partielle Ableitungen nach Otepka (15) ohne dB und dL
|
||||
|
||||
d_r_dX_zp = (sp.sin(B_sp) * sp.cos(L_sp) * sin_t - sp.sin(L_sp) * cos_t) / s_horiz
|
||||
|
||||
d_r_dY_zp = (sp.sin(B_sp) * sp.sin(L_sp) * sin_t + sp.cos(L_sp) * cos_t) / s_horiz
|
||||
|
||||
d_r_dZ_zp = (-sp.cos(B_sp) * sin_t) / s_horiz
|
||||
|
||||
# Standpunkt-Ableitungen (SP) = negatives Vorzeichen
|
||||
|
||||
d_r_dX_sp, d_r_dY_sp, d_r_dZ_sp = -d_r_dX_zp, -d_r_dY_zp, -d_r_dZ_zp
|
||||
|
||||
# Orientierung: r = a + O => ∂r/∂O = -1
|
||||
|
||||
d_r_dO_sp = -1
|
||||
|
||||
zeile_A_Matrix = []
|
||||
|
||||
for punkt in liste_punktnummern:
|
||||
|
||||
if punkt == standpunkt:
|
||||
|
||||
zeile_A_Matrix.extend([d_r_dX_sp, d_r_dY_sp, d_r_dZ_sp])
|
||||
|
||||
elif punkt == zielpunkt:
|
||||
|
||||
zeile_A_Matrix.extend([d_r_dX_zp, d_r_dY_zp, d_r_dZ_zp])
|
||||
|
||||
else:
|
||||
|
||||
zeile_A_Matrix.extend([0, 0, 0])
|
||||
|
||||
for orientierung in liste_orientierungsunbekannte:
|
||||
zeile_A_Matrix.append(d_r_dO_sp if orientierung == beobachtungsgruppeID else 0)
|
||||
|
||||
liste_A_richtung_zeilen.append(zeile_A_Matrix)
|
||||
|
||||
liste_zeilenbeschriftungen_richtung.append(
|
||||
|
||||
f"{beobachtungenID}_R_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}"
|
||||
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
||||
elif beobachtungsart == "tachymeter_zenitwinkel":
|
||||
|
||||
# Zenitwinkel nach Otepka (16), dB und dL bewusst weggelassen
|
||||
|
||||
dX = X_zp - X_sp
|
||||
|
||||
dY = Y_zp - Y_sp
|
||||
|
||||
dZ = Z_zp - Z_sp
|
||||
|
||||
s_geom = sp.sqrt(dX ** 2 + dY ** 2 + dZ ** 2)
|
||||
|
||||
z_loc = (sp.cos(B_sp) * sp.cos(L_sp)) * dX + (sp.cos(B_sp) * sp.sin(L_sp)) * dY + (sp.sin(B_sp)) * dZ
|
||||
|
||||
cos_zw = z_loc / s_geom
|
||||
|
||||
sin_zw = sp.sqrt(1 - cos_zw ** 2)
|
||||
|
||||
denom = (s_geom ** 2) * sin_zw
|
||||
|
||||
d_zw_dX_zp = (dX * cos_zw - s_geom * sp.cos(B_sp) * sp.cos(L_sp)) / denom
|
||||
|
||||
d_zw_dY_zp = (dY * cos_zw - s_geom * sp.cos(B_sp) * sp.sin(L_sp)) / denom
|
||||
|
||||
d_zw_dZ_zp = (dZ * cos_zw - s_geom * sp.sin(B_sp)) / denom
|
||||
|
||||
d_zw_dX_sp, d_zw_dY_sp, d_zw_dZ_sp = -d_zw_dX_zp, -d_zw_dY_zp, -d_zw_dZ_zp
|
||||
|
||||
zeile_A_Matrix = []
|
||||
|
||||
for punkt in liste_punktnummern:
|
||||
|
||||
if punkt == standpunkt:
|
||||
|
||||
zeile_A_Matrix.extend([d_zw_dX_sp, d_zw_dY_sp, d_zw_dZ_sp])
|
||||
|
||||
elif punkt == zielpunkt:
|
||||
|
||||
zeile_A_Matrix.extend([d_zw_dX_zp, d_zw_dY_zp, d_zw_dZ_zp])
|
||||
|
||||
else:
|
||||
|
||||
zeile_A_Matrix.extend([0, 0, 0])
|
||||
|
||||
# Zenitwinkel hat keine Orientierungsunbekannte
|
||||
|
||||
for orientierung in liste_orientierungsunbekannte:
|
||||
zeile_A_Matrix.append(0)
|
||||
|
||||
liste_A_zenitwinkel_zeilen.append(zeile_A_Matrix)
|
||||
|
||||
liste_zeilenbeschriftungen_zenitwinkel.append(
|
||||
|
||||
f"{beobachtungenID}_ZW_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}"
|
||||
|
||||
)
|
||||
|
||||
if liste_beobachtungsgleichungen_distanz:
|
||||
f_matrix_dist = sp.Matrix(liste_beobachtungsgleichungen_distanz)
|
||||
unbekanntenvektor = sp.Matrix(liste_unbekannte)
|
||||
A_dist = f_matrix_dist.jacobian(unbekanntenvektor)
|
||||
else:
|
||||
A_dist = None
|
||||
|
||||
if liste_A_richtung_zeilen:
|
||||
A_richtung = sp.Matrix(liste_A_richtung_zeilen)
|
||||
else:
|
||||
A_richtung = None
|
||||
|
||||
if liste_A_zenitwinkel_zeilen:
|
||||
A_zenitwinkel = sp.Matrix(liste_A_zenitwinkel_zeilen)
|
||||
else:
|
||||
A_zenitwinkel = None
|
||||
|
||||
A_gesamt = None
|
||||
liste_zeilenbeschriftungen_gesamt = []
|
||||
|
||||
if A_dist is not None:
|
||||
A_gesamt = A_dist
|
||||
liste_zeilenbeschriftungen_gesamt.extend(liste_zeilenbeschriftungen_distanz)
|
||||
|
||||
if A_richtung is not None:
|
||||
if A_gesamt is None:
|
||||
A_gesamt = A_richtung
|
||||
else:
|
||||
A_gesamt = A_gesamt.col_join(A_richtung)
|
||||
liste_zeilenbeschriftungen_gesamt.extend(liste_zeilenbeschriftungen_richtung)
|
||||
|
||||
if A_zenitwinkel is not None:
|
||||
if A_gesamt is None:
|
||||
A_gesamt = A_zenitwinkel
|
||||
else:
|
||||
A_gesamt = A_gesamt.col_join(A_zenitwinkel)
|
||||
liste_zeilenbeschriftungen_gesamt.extend(liste_zeilenbeschriftungen_zenitwinkel)
|
||||
|
||||
if A_gesamt is None:
|
||||
return None
|
||||
|
||||
self.liste_unbekanntenvektor_symbolisch = liste_unbekannte
|
||||
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Jacobi_Matrix_Symbolisch.csv", liste_unbekannte,
|
||||
liste_zeilenbeschriftungen_gesamt, A_gesamt, "Beobachtung")
|
||||
return A_gesamt, liste_unbekannte, liste_zeilenbeschriftungen_gesamt
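# Illustrative cross-check (not part of the diff): the hand-coded partial d_r_dX_zp above should
# equal sympy's derivative of the azimuth a12 = atan2(y_loc, x_loc) with respect to X_zp.
# The coordinates and the B/L values below are invented.
import sympy as sp

X_sp, Y_sp, Z_sp, X_zp, Y_zp, Z_zp, B, L = sp.symbols("X_sp Y_sp Z_sp X_zp Y_zp Z_zp B L")
dX, dY, dZ = X_zp - X_sp, Y_zp - Y_sp, Z_zp - Z_sp
x_loc = (-sp.sin(B) * sp.cos(L)) * dX + (-sp.sin(B) * sp.sin(L)) * dY + sp.cos(B) * dZ
y_loc = (-sp.sin(L)) * dX + sp.cos(L) * dY
s_horiz = sp.sqrt(x_loc**2 + y_loc**2)
sin_t, cos_t = y_loc / s_horiz, x_loc / s_horiz

d_hand = (sp.sin(B) * sp.cos(L) * sin_t - sp.sin(L) * cos_t) / s_horiz   # formula used above
d_auto = sp.diff(sp.atan2(y_loc, x_loc), X_zp)                           # sympy reference

werte = {X_sp: 3794838.0, Y_sp: 546812.0, Z_sp: 5080105.0,
         X_zp: 3794889.0, Y_zp: 547087.0, Z_zp: 5080038.0,
         B: 53.1 * sp.pi / 180, L: 8.2 * sp.pi / 180}
print(sp.N(d_hand.subs(werte) - d_auto.subs(werte)))  # ~0 if both expressions agree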
|
||||
|
||||
def jacobi_matrix_symbolisch_alt(self):
|
||||
#liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung", "tachymeter_zenitwinkel"]
|
||||
liste_beobachtungsarten = ["tachymeter_distanz", "tachymeter_richtung"]
|
||||
db_zugriff = Datenbankzugriff(self.pfad_datenbank)
|
||||
|
||||
liste_beobachtungen_rohdaten = []
|
||||
liste_punktnummern =[]
|
||||
|
||||
liste_orientierungsunbekannte = []
|
||||
|
||||
for beobachtungsart in liste_beobachtungsarten:
|
||||
liste_id_standpunkt_zielpunkt = db_zugriff.get_beobachtungen_id_beobachtungsgruppe_standpunkt_zielpunkt(beobachtungsart)
|
||||
|
||||
for beobachtungenID, beobachtungsgruppeID, standpunkt, zielpunkt in liste_id_standpunkt_zielpunkt:
|
||||
liste_beobachtungen_rohdaten.append(
|
||||
(beobachtungsart, beobachtungenID, beobachtungsgruppeID, standpunkt, zielpunkt)
|
||||
)
|
||||
|
||||
if standpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(standpunkt)
|
||||
if zielpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(zielpunkt)
|
||||
|
||||
if beobachtungsart == "tachymeter_richtung":
|
||||
if beobachtungsgruppeID not in liste_orientierungsunbekannte:
|
||||
liste_orientierungsunbekannte.append(beobachtungsgruppeID)
|
||||
if liste_beobachtungen_rohdaten == []:
|
||||
return None
|
||||
|
||||
#dict_punkt_symbole = {}
|
||||
liste_unbekannte = []
|
||||
|
||||
for punkt in liste_punktnummern:
|
||||
X, Y, Z = sp.symbols(f"X{punkt} Y{punkt} Z{punkt}")
|
||||
self.dict_punkt_symbole[punkt] = (X, Y, Z)
|
||||
liste_unbekannte.append(X)
|
||||
liste_unbekannte.append(Y)
|
||||
liste_unbekannte.append(Z)
|
||||
@@ -67,12 +313,12 @@ class FunktionalesModell:
|
||||
liste_zeilenbeschriftungen_richtung = []
|
||||
|
||||
for beobachtungsart, beobachtungenID, beobachtungsgruppeID, standpunkt, zielpunkt in liste_beobachtungen_rohdaten:
|
||||
X_sp, Y_sp, Z_sp = dict_punkt_symbole[standpunkt]
|
||||
X_zp, Y_zp, Z_zp = dict_punkt_symbole[zielpunkt]
|
||||
X_sp, Y_sp, Z_sp = self.dict_punkt_symbole[standpunkt]
|
||||
X_zp, Y_zp, Z_zp = self.dict_punkt_symbole[zielpunkt]
|
||||
B_sp, L_sp = sp.symbols(f"B{standpunkt} L{standpunkt}")
|
||||
alpha = sp.symbols(f"alpha{standpunkt}_{zielpunkt}")
|
||||
zw = sp.symbols(f"zw{standpunkt}_{zielpunkt}")
|
||||
s = sp.symbols(f"s{standpunkt}_{zielpunkt}")
|
||||
alpha = sp.symbols(f"{beobachtungenID}_R_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
zw = sp.symbols(f"{beobachtungenID}_ZW_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
s = sp.symbols(f"{beobachtungenID}_SD_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
|
||||
if beobachtungsart == "tachymeter_distanz":
|
||||
beobachtungsgleichung = sp.sqrt(
|
||||
@@ -81,7 +327,7 @@ class FunktionalesModell:
|
||||
+ (Z_zp - Z_sp) ** 2
|
||||
)
|
||||
liste_beobachtungsgleichungen_distanz.append(beobachtungsgleichung)
|
||||
liste_zeilenbeschriftungen_distanz.append(f"SD {beobachtungsgruppeID} {standpunkt}-{zielpunkt}")
|
||||
liste_zeilenbeschriftungen_distanz.append(f"{beobachtungenID}_SD_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
|
||||
if beobachtungsart == "tachymeter_richtung":
|
||||
#for beobachtungenID, beobachtungsgruppeID, standpunkt, zielpunkt in liste_id_standpunkt_zielpunkt:
|
||||
@@ -110,7 +356,7 @@ class FunktionalesModell:
|
||||
|
||||
liste_A_richtung_zeilen.append(zeile_A_Matrix)
|
||||
liste_zeilenbeschriftungen_richtung.append(
|
||||
f"R {beobachtungsgruppeID} {standpunkt}-{zielpunkt}"
|
||||
f"{beobachtungenID}_R_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}"
|
||||
)
|
||||
|
||||
if beobachtungsart == "tachymeter_zenitwinkel":
|
||||
@@ -135,7 +381,7 @@ class FunktionalesModell:
|
||||
|
||||
liste_A_richtung_zeilen.append(zeile_A_Matrix)
|
||||
liste_zeilenbeschriftungen_richtung.append(
|
||||
f"ZW {beobachtungsgruppeID} {standpunkt}-{zielpunkt}"
|
||||
f"{beobachtungenID}_ZW_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}"
|
||||
)
|
||||
|
||||
if liste_beobachtungsgleichungen_distanz:
|
||||
@@ -164,22 +410,291 @@ class FunktionalesModell:
|
||||
else:
|
||||
return None
|
||||
|
||||
# --- Export der A_jacobian-Matrix in eine CSV-Datei ---
|
||||
dateiname_export = "Jacobi_Matrix.csv"
|
||||
self.liste_unbekanntenvektor_symbolisch = liste_unbekannte
|
||||
|
||||
with open(dateiname_export, "w", newline="", encoding="utf-8") as csvfile:
|
||||
writer = csv.writer(csvfile, delimiter=";")
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Jacobi_Matrix_Symbolisch.csv", liste_unbekannte,
|
||||
liste_zeilenbeschriftungen_gesamt, A_gesamt, "Beobachtung")
|
||||
return A_gesamt, liste_unbekannte, liste_zeilenbeschriftungen_gesamt
|
||||
|
||||
# Kopfzeile: leere Ecke + Namen der Unbekannten
|
||||
kopfzeile = ["Beobachtung"]
|
||||
for unbekannte in liste_unbekannte:
|
||||
kopfzeile.append(str(unbekannte))
|
||||
writer.writerow(kopfzeile)
|
||||
def jacobi_matrix_zahlen_iteration_0(self, A_symbolisch, koordinatenart, liste_unbekannte = None, liste_zeilenbeschriftungen_gesamt = None):
|
||||
|
||||
# Zeilen: Standpunkt-Zielpunkt + Jacobimatrix-Zeile
|
||||
for zeilenbeschriftung, zeile in zip(liste_zeilenbeschriftungen_gesamt, A_gesamt.tolist()):
|
||||
zeile_als_text = [zeilenbeschriftung] + [str(eintrag) for eintrag in zeile]
|
||||
writer.writerow(zeile_als_text)
|
||||
if koordinatenart == "naeherung_us":
|
||||
A_numerisch = A_symbolisch.xreplace(self.substitutionen_dict)
|
||||
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Jacobi_Matrix_Numerisch_Iteration0.csv", liste_unbekannte,
|
||||
liste_zeilenbeschriftungen_gesamt, A_numerisch, "Beobachtung")
|
||||
|
||||
return A_numerisch
|
||||
else:
|
||||
print("Koordinaten noch nicht implementiert!")
|
||||
|
||||
def beobachtungsvektor_numerisch(self, liste_beobachtungsvektor_symbolisch):
|
||||
liste_beobachtungsvektor_numerisch = []
|
||||
for beobachtung_symbolisch in liste_beobachtungsvektor_symbolisch:
|
||||
liste_beobachtungsvektor_numerisch.append(self.substitutionen_dict[sp.Symbol(beobachtung_symbolisch)])
|
||||
|
||||
beobachtungsvektor_numerisch = sp.Matrix(liste_beobachtungsvektor_numerisch)
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Beobachtungsvektor_Numerisch.csv", [""], liste_beobachtungsvektor_symbolisch, beobachtungsvektor_numerisch, "Beobachtungsvektor")
|
||||
return beobachtungsvektor_numerisch
|
||||
|
||||
def beobachtungsvektor_naeherung_symbolisch_alt(self, liste_beobachtungsvektor_symbolisch):
|
||||
liste_beobachtungsgleichungen = []
|
||||
self.dict_punkt_symbole = {}
|
||||
liste_punktnummern = []
|
||||
|
||||
for beobachtung_symbolisch in liste_beobachtungsvektor_symbolisch:
|
||||
aufgeteilt = beobachtung_symbolisch.split("_")
|
||||
standpunkt = str(aufgeteilt[3])
|
||||
zielpunkt = str(aufgeteilt[4])
|
||||
|
||||
if standpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(standpunkt)
|
||||
if zielpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(zielpunkt)
|
||||
|
||||
for punkt in liste_punktnummern:
|
||||
X, Y, Z = sp.symbols(f"X{punkt} Y{punkt} Z{punkt}")
|
||||
self.dict_punkt_symbole[str(punkt)] = (X, Y, Z)
|
||||
|
||||
for beobachtung_symbolisch in liste_beobachtungsvektor_symbolisch:
|
||||
aufgeteilt = beobachtung_symbolisch.split("_")
|
||||
#beobachtungen_ID = aufgeteilt[0]
|
||||
beobachtungsart = aufgeteilt[1] # "SD", "R", "ZW"
|
||||
#beobachtungsgruppeID = aufgeteilt[2]
|
||||
standpunkt = str(aufgeteilt[3])
|
||||
zielpunkt = str(aufgeteilt[4])
|
||||
|
||||
if beobachtungsart == "SD":
|
||||
X_sp, Y_sp, Z_sp = self.dict_punkt_symbole[standpunkt]
|
||||
X_zp, Y_zp, Z_zp = self.dict_punkt_symbole[zielpunkt]
|
||||
|
||||
beobachtungsgleichung = sp.sqrt(
|
||||
(X_zp - X_sp) ** 2
|
||||
+ (Y_zp - Y_sp) ** 2
|
||||
+ (Z_zp - Z_sp) ** 2
|
||||
)
|
||||
liste_beobachtungsgleichungen.append(beobachtungsgleichung)
|
||||
elif beobachtungsart == "R":
|
||||
X_sp, Y_sp, Z_sp = self.dict_punkt_symbole[standpunkt]
|
||||
X_zp, Y_zp, Z_zp = self.dict_punkt_symbole[zielpunkt]
|
||||
|
||||
dX = X_zp - X_sp
|
||||
dY = Y_zp - Y_sp
|
||||
dZ = Z_zp - Z_sp
|
||||
|
||||
B_sp = sp.Symbol(f"B{standpunkt}")
|
||||
L_sp = sp.Symbol(f"L{standpunkt}")
|
||||
|
||||
O_sp = sp.Symbol(f"O{beobachtungsgruppeID}")
|
||||
|
||||
x = (-sp.sin(B_sp) * sp.cos(L_sp)) * dX + (-sp.sin(B_sp) * sp.sin(L_sp)) * dY + (sp.cos(B_sp)) * dZ
|
||||
y = (-sp.sin(L_sp)) * dX + (sp.cos(L_sp)) * dY
|
||||
|
||||
a12 = sp.atan2(y, x)
|
||||
|
||||
beobachtungsgleichung = a12 - O_sp
|
||||
liste_beobachtungsgleichungen.append(beobachtungsgleichung)
|
||||
|
||||
beobachtungsvektor_naeherung_symbolisch = sp.Matrix(liste_beobachtungsgleichungen)
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Beobachtungsvektor_Näherung_Symbolisch.csv", [""],
|
||||
liste_beobachtungsvektor_symbolisch, beobachtungsvektor_naeherung_symbolisch, "Beobachtungsvektor")
|
||||
|
||||
return beobachtungsvektor_naeherung_symbolisch
|
||||
|
||||
def beobachtungsvektor_naeherung_symbolisch(self, liste_beobachtungsvektor_symbolisch):
|
||||
liste_beobachtungsgleichungen = []
|
||||
self.dict_punkt_symbole = {}
|
||||
liste_punktnummern = []
|
||||
|
||||
for beobachtung_symbolisch in liste_beobachtungsvektor_symbolisch:
|
||||
aufgeteilt = beobachtung_symbolisch.split("_")
|
||||
standpunkt = str(aufgeteilt[3])
|
||||
zielpunkt = str(aufgeteilt[4])
|
||||
|
||||
if standpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(standpunkt)
|
||||
if zielpunkt not in liste_punktnummern:
|
||||
liste_punktnummern.append(zielpunkt)
|
||||
|
||||
for punkt in liste_punktnummern:
|
||||
X, Y, Z = sp.symbols(f"X{punkt} Y{punkt} Z{punkt}")
|
||||
self.dict_punkt_symbole[str(punkt)] = (X, Y, Z)
|
||||
|
||||
for beobachtung_symbolisch in liste_beobachtungsvektor_symbolisch:
|
||||
aufgeteilt = beobachtung_symbolisch.split("_")
|
||||
#beobachtungen_ID = aufgeteilt[0]
|
||||
beobachtungsart = aufgeteilt[1] # "SD", "R", "ZW"
|
||||
beobachtungsgruppeID = aufgeteilt[2]
|
||||
standpunkt = str(aufgeteilt[3])
|
||||
zielpunkt = str(aufgeteilt[4])
|
||||
|
||||
X_sp, Y_sp, Z_sp = self.dict_punkt_symbole[standpunkt]
|
||||
X_zp, Y_zp, Z_zp = self.dict_punkt_symbole[zielpunkt]
|
||||
|
||||
dX = X_zp - X_sp
|
||||
dY = Y_zp - Y_sp
|
||||
dZ = Z_zp - Z_sp
|
||||
s = sp.sqrt(dX ** 2 + dY ** 2 + dZ ** 2) # Schrägstrecke
|
||||
|
||||
B_sp = sp.Symbol(f"B{standpunkt}")
|
||||
L_sp = sp.Symbol(f"L{standpunkt}")
|
||||
|
||||
if beobachtungsart == "SD":
|
||||
|
||||
s_geom = sp.sqrt(dX ** 2 + dY ** 2 + dZ ** 2)
|
||||
liste_beobachtungsgleichungen.append(s_geom)
|
||||
|
||||
elif beobachtungsart == "R":
|
||||
|
||||
O_sp = sp.Symbol(f"O{beobachtungsgruppeID}")
|
||||
|
||||
# Lokales System: x_loc = Nord, y_loc = Ost
|
||||
x_loc = (-sp.sin(B_sp) * sp.cos(L_sp)) * dX + (-sp.sin(B_sp) * sp.sin(L_sp)) * dY + (sp.cos(B_sp)) * dZ
|
||||
y_loc = (-sp.sin(L_sp)) * dX + (sp.cos(L_sp)) * dY
|
||||
|
||||
a12 = sp.atan2(y_loc, x_loc)
|
||||
|
||||
# Richtung nach Otepka: r = a12 - O
|
||||
liste_beobachtungsgleichungen.append(a12 - O_sp)
|
||||
|
||||
|
||||
return A_gesamt
|
||||
elif beobachtungsart == "ZW":
|
||||
|
||||
dX = X_zp - X_sp
|
||||
|
||||
dY = Y_zp - Y_sp
|
||||
|
||||
dZ = Z_zp - Z_sp
|
||||
|
||||
s_geom = sp.sqrt(dX ** 2 + dY ** 2 + dZ ** 2)
|
||||
|
||||
z_loc = (sp.cos(B_sp) * sp.cos(L_sp)) * dX + (sp.cos(B_sp) * sp.sin(L_sp)) * dY + (sp.sin(B_sp)) * dZ
|
||||
|
||||
zw = sp.acos(z_loc / s_geom)
|
||||
|
||||
liste_beobachtungsgleichungen.append(zw)
|
||||
|
||||
beobachtungsvektor_naeherung_symbolisch = sp.Matrix(liste_beobachtungsgleichungen)
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Beobachtungsvektor_Näherung_Symbolisch.csv", [""],
|
||||
liste_beobachtungsvektor_symbolisch, beobachtungsvektor_naeherung_symbolisch, "Beobachtungsvektor")
|
||||
|
||||
return beobachtungsvektor_naeherung_symbolisch
|
||||
|
||||
def beobachtungsvektor_naeherung_numerisch_iteration0(self, liste_beobachtungsvektor_symbolisch, beobachtungsvektor_naeherung_symbolisch):
|
||||
beobachtungsvektor_naeherung_numerisch_iteration0 = beobachtungsvektor_naeherung_symbolisch.xreplace(self.substitutionen_dict)
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Beobachtungsvektor_Näherung_Numerisch_Iteration0.csv", [""],
|
||||
liste_beobachtungsvektor_symbolisch, beobachtungsvektor_naeherung_numerisch_iteration0,
|
||||
"Beobachtungsvektor")
|
||||
|
||||
return beobachtungsvektor_naeherung_numerisch_iteration0
|
||||
|
||||
def unbekanntenvektor_symbolisch(self, liste_unbekannte):
|
||||
unbekanntenvektor_symbolisch = sp.Matrix(liste_unbekannte)
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Unbekanntenvektor_Symbolisch.csv", [""], liste_unbekannte, unbekanntenvektor_symbolisch,
|
||||
"Unbekanntenvektor")
|
||||
return(unbekanntenvektor_symbolisch)
|
||||
|
||||
def unbekanntenvektor_numerisch(self, liste_unbekanntenvektor_symbolisch, unbekanntenvektor_symbolisch, dX_Vektor = None, unbekanntenvektor_neumerisch_vorherige_Iteration = None):
|
||||
if not hasattr(self, "liste_unbekanntenvektor_symbolisch"):
|
||||
self.liste_unbekanntenvektor_symbolisch = liste_unbekanntenvektor_symbolisch
|
||||
|
||||
if dX_Vektor is None and unbekanntenvektor_neumerisch_vorherige_Iteration is None:
|
||||
unbekanntenvektor_numerisch = unbekanntenvektor_symbolisch.xreplace(self.substitutionen_dict)
|
||||
else:
|
||||
unbekanntenvektor_numerisch = unbekanntenvektor_neumerisch_vorherige_Iteration + dX_Vektor
|
||||
|
||||
self.substitutionen_dict = self.dict_substitutionen_uebergeordnetes_system(unbekanntenvektor_numerisch)
|
||||
|
||||
Export.matrix_to_csv(r"Zwischenergebnisse\Unbekanntenvektor_Numerisch_Iteration0.csv", [""],
|
||||
liste_unbekanntenvektor_symbolisch, unbekanntenvektor_numerisch,
|
||||
"Unbekanntenvektor")
|
||||
return unbekanntenvektor_numerisch
|
||||
|
||||
def unbekanntenvektor_numerisch_to_dict_unbekanntenvektor(self, liste_unbekanntenvektor_symbolisch, unbekanntenvektor_numerisch):
|
||||
dict_unbekanntenvektor_numerisch = {}
|
||||
index = 0
|
||||
|
||||
for symbol in liste_unbekanntenvektor_symbolisch:
|
||||
name = str(symbol)
|
||||
if not name.startswith("X"):
|
||||
continue
|
||||
|
||||
punktnummer = str(name[1:])
|
||||
|
||||
dict_unbekanntenvektor_numerisch[punktnummer] = sp.Matrix([
|
||||
unbekanntenvektor_numerisch[index],
|
||||
unbekanntenvektor_numerisch[index + 1],
|
||||
unbekanntenvektor_numerisch[index + 2]
|
||||
])
|
||||
index += 3
|
||||
return dict_unbekanntenvektor_numerisch
|
||||
|
||||
|
||||
|
||||
def berechnung_dl(self, beobachtungsvektor_numerisch, beobachtungsvektor_naeherung_numerisch):
|
||||
dl = beobachtungsvektor_numerisch - beobachtungsvektor_naeherung_numerisch
|
||||
for i, name in enumerate(liste_beobachtungsvektor_symbolisch):
|
||||
if "_R_" in name:
|
||||
dl[i] = sp.atan2(sp.sin(dl[i]), sp.cos(dl[i])) # wrap auf (-pi, pi]
|
||||
|
||||
return dl
|
||||
|
||||
def dict_substitutionen_uebergeordnetes_system(self, unbekanntenvektor_aus_iteration = None):
|
||||
db_zugriff = Datenbankzugriff(self.pfad_datenbank)
|
||||
if unbekanntenvektor_aus_iteration is None:
|
||||
dict_koordinaten = db_zugriff.get_koordinaten("naeherung_us")
|
||||
else:
|
||||
dict_koordinaten = self.unbekanntenvektor_numerisch_to_dict_unbekanntenvektor(
|
||||
self.liste_unbekanntenvektor_symbolisch,
|
||||
unbekanntenvektor_aus_iteration
|
||||
)
|
||||
|
||||
dict_koordinaten_B_L = self.berechnungen.geometrische_breite_laenge(dict_koordinaten)
|
||||
liste_beobachtungen = db_zugriff.get_beobachtungen_from_beobachtungenid()
|
||||
substitutionen = {}
|
||||
|
||||
for punktnummer, vektor in dict_koordinaten_B_L.items():
|
||||
X_sym, Y_sym, Z_sym, B_sym, L_Sym = sp.symbols(
|
||||
f"X{punktnummer} Y{punktnummer} Z{punktnummer} B{punktnummer} L{punktnummer}")
|
||||
|
||||
substitutionen[X_sym] = vektor[0][0]
|
||||
substitutionen[Y_sym] = vektor[0][1]
|
||||
substitutionen[Z_sym] = vektor[0][2]
|
||||
substitutionen[B_sym] = vektor[1]
|
||||
substitutionen[L_Sym] = vektor[2]
|
||||
|
||||
for standpunkt, zielpunkt, beobachtungenID, beobachtungsgruppeID, tachymeter_richtung, tachymeter_zenitwinkel, tachymeter_distanz in liste_beobachtungen:
|
||||
alpha = sp.symbols(f"{beobachtungenID}_R_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
zw = sp.symbols(f"{beobachtungenID}_ZW_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
s = sp.symbols(f"{beobachtungenID}_SD_{beobachtungsgruppeID}_{standpunkt}_{zielpunkt}")
|
||||
|
||||
substitutionen[alpha] = tachymeter_richtung
|
||||
substitutionen[zw] = tachymeter_zenitwinkel
|
||||
substitutionen[s] = tachymeter_distanz
|
||||
|
||||
if unbekanntenvektor_aus_iteration is not None:
|
||||
dict_O = self.unbekanntenvektor_numerisch_to_dict_orientierungen(
|
||||
self.liste_unbekanntenvektor_symbolisch,
|
||||
unbekanntenvektor_aus_iteration
|
||||
)
|
||||
for orientierungs_id, wert in dict_O.items():
|
||||
substitutionen[sp.Symbol(f"O{orientierungs_id}")] = wert
|
||||
else:
|
||||
for standpunkt, zielpunkt, beobachtungenID, beobachtungsgruppeID, *_ in liste_beobachtungen:
|
||||
O_sym = sp.Symbol(f"O{beobachtungsgruppeID}")
|
||||
if O_sym not in substitutionen:
|
||||
substitutionen[O_sym] = 0
|
||||
|
||||
return substitutionen
|
||||
|
||||
def unbekanntenvektor_numerisch_to_dict_orientierungen(self, liste_unbekanntenvektor_symbolisch,
|
||||
unbekanntenvektor_numerisch):
|
||||
dict_O = {}
|
||||
|
||||
for i, symbol in enumerate(liste_unbekanntenvektor_symbolisch):
|
||||
name = str(symbol)
|
||||
if name.startswith("O"):
|
||||
orientierungs_id = name[1:]
|
||||
dict_O[orientierungs_id] = unbekanntenvektor_numerisch[i]
|
||||
|
||||
return dict_O
|
||||
|
||||
50
Import.py
@@ -2,6 +2,9 @@ import csv
|
||||
import sqlite3
|
||||
from decimal import Decimal
|
||||
|
||||
import Berechnungen
|
||||
|
||||
|
||||
class Import:
|
||||
def __init__(self, pfad_datenbank):
|
||||
self.pfad_datenbank = pfad_datenbank
|
||||
@@ -181,21 +184,23 @@ class Import:
|
||||
# print(liste)
|
||||
richtung1 = self.string_to_decimal(liste_aktueller_zielpunkt[5])
|
||||
richtung2 = self.string_to_decimal(liste[5]) - Decimal(200)
|
||||
zenitwinkel_vollsatz = (self.string_to_decimal(liste_aktueller_zielpunkt[6]) - self.string_to_decimal(
|
||||
zenitwinkel_vollsatz_gon = (self.string_to_decimal(liste_aktueller_zielpunkt[6]) - self.string_to_decimal(
|
||||
liste[6]) + 400) / 2
|
||||
zenitwinkel_vollsatz_rad = Berechnungen.Einheitenumrechnung.gon_to_rad_Decimal(zenitwinkel_vollsatz_gon)
|
||||
distanz_vollsatz = (self.string_to_decimal(liste_aktueller_zielpunkt[7]) + self.string_to_decimal(
|
||||
liste[7])) / 2
|
||||
if richtung2 < 0:
|
||||
richtung2 += Decimal(400)
|
||||
elif richtung2 > 400:
|
||||
richtung2 -= Decimal(400)
|
||||
richtung_vollsatz = (richtung1 + richtung2) / 2
|
||||
richtung_vollsatz_gon = (richtung1 + richtung2) / 2
|
||||
richtung_vollsatz_rad = Berechnungen.Einheitenumrechnung.gon_to_rad_Decimal(richtung_vollsatz_gon)
|
||||
|
||||
# print(richtung_vollsatz)
|
||||
# print(zenitwinkel_vollsatz)
|
||||
# print(distanz_vollsatz)
|
||||
liste_beobachtungen_import.append(
|
||||
[liste[0], liste[3], liste[4], richtung_vollsatz, zenitwinkel_vollsatz, distanz_vollsatz])
|
||||
[liste[0], liste[3], liste[4], richtung_vollsatz_rad, zenitwinkel_vollsatz_rad, distanz_vollsatz])
|
||||
|
||||
del liste_beobachtungen_vorbereitung[index]
|
||||
del liste_beobachtungen_vorbereitung[0]
|
||||
@@ -218,4 +223,41 @@ class Import:
|
||||
con.commit()
|
||||
cursor.close()
|
||||
con.close()
|
||||
print(f"Der Import der Datei {pfad_datei} wurde erfolgreich abgeschlossen.")
|
||||
print(f"Der Import der Datei {pfad_datei} wurde erfolgreich abgeschlossen.")
|
||||
|
||||
def import_koordinaten_gnss(self, pfad_datei, liste_sapos_stationen_genauigkeiten):
|
||||
liste_zeilen = []
|
||||
dict_koordinaten = {}
|
||||
|
||||
con = sqlite3.connect(self.pfad_datenbank)
|
||||
cursor = con.cursor()
|
||||
|
||||
with (open(pfad_datei, newline="", encoding="utf-8") as csvfile):
|
||||
r = csv.reader(csvfile, delimiter = ";")
|
||||
for i, row in enumerate(r):
|
||||
row_neu = []
|
||||
for eintrag in row:
|
||||
eintrag = str(eintrag).strip()
|
||||
|
||||
eintrag = eintrag.replace("'", "")
|
||||
aufgeteilt = eintrag.split()
|
||||
for teil in aufgeteilt:
|
||||
teil = teil.split(",")
|
||||
row_neu.extend(teil)
|
||||
if row_neu[1] == 'Referenz' and row_neu[7] == '0.0000' and row_neu[8] == '0.0000' and row_neu[9] == '0.0000':
|
||||
row_neu[7] = liste_sapos_stationen_genauigkeiten[0]
|
||||
row_neu[8] = liste_sapos_stationen_genauigkeiten[1]
|
||||
row_neu[9] = liste_sapos_stationen_genauigkeiten[2]
|
||||
cursor.execute(f"""INSERT INTO Netzpunkte (punktnummer, naeherungx_us, naeherungy_us, naeherungz_us, stabw_vorinfo_x, stabw_vorinfo_y, stabw_vorinfo_z) VALUES (?, ?, ?, ?, ?, ?, ?) ON CONFLICT (punktnummer) DO UPDATE SET naeherungx_us = excluded.naeherungx_us,
|
||||
naeherungy_us = excluded.naeherungy_us,
|
||||
naeherungz_us = excluded.naeherungz_us,
|
||||
stabw_vorinfo_x = excluded.stabw_vorinfo_x,
|
||||
stabw_vorinfo_y = excluded.stabw_vorinfo_y,
|
||||
stabw_vorinfo_z = excluded.stabw_vorinfo_z""", (row_neu[0], row_neu[4], row_neu[5], row_neu[6], row_neu[7], row_neu[8], row_neu[9])
|
||||
)
|
||||
#liste_zeilen.append(row_neu)
|
||||
|
||||
|
||||
con.commit()
|
||||
con.close()
|
||||
return "Import der Koordinaten aus stationärem GNSS abgeschlossen."
|
||||
|
||||
2629
Jacobi_Matrix.csv
File diff suppressed because it is too large
@@ -2,11 +2,21 @@ from dataclasses import dataclass
|
||||
from typing import Sequence, List, Dict
|
||||
import sympy as sp
|
||||
import numpy as np
|
||||
from decimal import Decimal
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
|
||||
@dataclass
|
||||
class Genauigkeitsmaße:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def s0apost(v, P, r):
|
||||
vv = (v.T * P * v)[0, 0]
|
||||
s0apost = (Decimal(str(vv)) / Decimal(r)) ** Decimal("0.5")
|
||||
return s0apost
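# Illustrative usage sketch (not part of the diff): s0 a posteriori for three toy residuals with
# identity weights and redundancy r = 2. All numbers are invented; the module name is the one
# imported in the notebook above.
import sympy as sp
from Netzqualität_Genauigkeit import Genauigkeitsmaße

v = sp.Matrix([0.002, -0.001, 0.003])
P = sp.eye(3)
print(Genauigkeitsmaße.s0apost(v, P, 2))  # Decimal result of sqrt((v^T P v) / r)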
|
||||
|
||||
|
||||
|
||||
def helmertscher_punktfehler_3D(self, sigma_x: float, sigma_y: float, sigma_z: float) -> float:
|
||||
|
||||
109
Netzqualität_Zuverlässigkeit.py
Normal file
@@ -0,0 +1,109 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Sequence, List, Dict
|
||||
import sympy as sp
|
||||
|
||||
|
||||
@dataclass
|
||||
class Zuverlaessigkeit:
|
||||
|
||||
def redundanzanalyse(self, r_vec: Sequence[float]) -> Dict[str, object]:
|
||||
r_s = [sp.sympify(r) for r in r_vec]
|
||||
EVi = [float(r * 100) for r in r_s]
|
||||
klassen = [self.klassifiziere_ri(float(r)) for r in r_s]
|
||||
|
||||
return {
|
||||
"r_i": [float(r) for r in r_s],
|
||||
"EVi": EVi,
|
||||
"klassen": klassen,
|
||||
"r_sum": float(sum(r_s)),
|
||||
"min_r": float(min(r_s)),
|
||||
"max_r": float(max(r_s)),
|
||||
}
|
||||
|
||||
|
||||
|
||||
def klassifiziere_ri(self, ri: float) -> str:
|
||||
if ri < 0.01:
|
||||
return "nicht kontrollierbar"
|
||||
elif ri < 0.10:
|
||||
return "schlecht kontrollierbar"
|
||||
elif ri < 0.30:
|
||||
return "ausreichend kontrollierbar"
|
||||
elif ri < 0.70:
|
||||
return "gut kontrollierbar"
|
||||
else:
|
||||
return "nahezu vollständig redundant"
|
||||
|
||||
|
||||
|
||||
def globaltest(self, sigma0_hat: float, sigma0_apriori: float, F_krit: float):
|
||||
s_hat = sp.sympify(sigma0_hat)
|
||||
s0 = sp.sympify(sigma0_apriori)
|
||||
Fk = sp.sympify(F_krit)
|
||||
|
||||
T_G = (s_hat**2) / (s0**2)
|
||||
H0 = bool(T_G <= Fk)
|
||||
|
||||
return {
|
||||
"T_G": float(T_G),
|
||||
"F_krit": float(Fk),
|
||||
"H0_angenommen": H0,
|
||||
}
|
||||
|
||||
|
||||
|
||||
def data_snooping(
|
||||
self,
|
||||
v: Sequence[float],
|
||||
Qv_diag: Sequence[float],
|
||||
r_vec: Sequence[float],
|
||||
sigma0_hat: float,
|
||||
k: float,
|
||||
) -> List[Dict[str, float | bool]]:
|
||||
|
||||
v_s = [sp.sympify(x) for x in v]
|
||||
Qv_s = [sp.sympify(q) for q in Qv_diag]
|
||||
r_s = [sp.sympify(r) for r in r_vec]
|
||||
s0 = sp.sympify(sigma0_hat)
|
||||
k_s = sp.sympify(k)
|
||||
|
||||
results = []
|
||||
|
||||
for vi, Qvi, ri in zip(v_s, Qv_s, r_s):
|
||||
|
||||
s_vi = s0 * sp.sqrt(Qvi)
|
||||
NV_i = sp.Abs(vi) / s_vi
|
||||
|
||||
if ri == 0:
|
||||
GRZW_i = sp.oo
|
||||
else:
|
||||
GRZW_i = (s_vi / ri) * k_s
|
||||
|
||||
auff = bool(NV_i > k_s)
|
||||
|
||||
results.append({
|
||||
"v_i": float(vi),
|
||||
"Qv_i": float(Qvi),
|
||||
"r_i": float(ri),
|
||||
"s_vi": float(s_vi),
|
||||
"NV_i": float(NV_i),
|
||||
"GRZW_i": float(GRZW_i if GRZW_i != sp.oo else float("inf")),
|
||||
"auffällig": auff,
|
||||
})
|
||||
|
||||
return results
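# Illustrative usage sketch (not part of the diff): normalised residuals for three invented
# observations; k = 3.29 is a commonly used critical value, not something fixed by this module.
from Netzqualität_Zuverlässigkeit import Zuverlaessigkeit

z = Zuverlaessigkeit()
ergebnisse = z.data_snooping(
    v=[0.002, -0.010, 0.001],   # residuals
    Qv_diag=[0.5, 0.6, 0.4],    # diagonal of Q_vv
    r_vec=[0.3, 0.5, 0.2],      # redundancy numbers
    sigma0_hat=0.003,
    k=3.29,
)
print([zeile["auffällig"] for zeile in ergebnisse])  # only the second residual is flagged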
|
||||
|
||||
|
||||
|
||||
def aeussere_zuverlaessigkeit_EF(self, r_vec: Sequence[float], delta0: float):
|
||||
delta = sp.sympify(delta0)
|
||||
EF_list = []
|
||||
for ri in r_vec:
|
||||
ri_s = sp.sympify(ri)
|
||||
if ri_s == 0:
|
||||
EF = sp.oo
|
||||
else:
|
||||
EF = sp.sqrt((1 - ri_s) / ri_s) * delta
|
||||
EF_list.append(float(EF if EF != sp.oo else float("inf")))
|
||||
|
||||
return EF_list
|
||||
@@ -1,66 +1,150 @@
from typing import Dict, Any
import sympy as sp
from Stochastisches_Modell import StochastischesModell
from Netzqualität_Genauigkeit import Genauigkeitsmaße
from Datumsfestlegung import Datumsfestlegung
import sympy as sp
import Export


def iterative_ausgleichung(
def ausgleichung_global(
    A: sp.Matrix,
    l: sp.Matrix,
    modell: StochastischesModell,
    max_iter: int = 100,
    tol: float = 1e-3,
) -> Dict[str, Any]:
    dl: sp.Matrix,
    Q_ll: sp.Matrix,
    x0: sp.Matrix,
    idx_X, idx_Y, idx_Z,
    anschluss_indices,
    anschluss_werte,
    Sigma_AA,
):
    # 1) Datumsfestlegung (weiches Datum) System erweitern
    A_ext, dl_ext, Q_ext = Datumsfestlegung.weiches_datum(
        A=A,
        dl=dl,
        Q_ll=Q_ll,
        x0=x0,
        anschluss_indices=anschluss_indices,
        anschluss_werte=anschluss_werte,
        Sigma_AA=Sigma_AA,
    )

    ergebnisse_iter = []                                  #Liste für Zwischenergebnisse
    # 2) Gewichtsmatrix P
    P = StochastischesModell.berechne_P(Q_ext)

    for it in range(max_iter):
        Q_ll, P = modell.berechne_Qll_P()                 #Stochastisches Modell: Qll und P berechnen
    # 3) Normalgleichungsmatrix N und Absolutgliedvektor n
    N = A_ext.T * P * A_ext
    n = A_ext.T * P * dl_ext

        N = A.T * P * A                                   #Normalgleichungsmatrix N
        Q_xx = N.inv()                                    #Kofaktormatrix der Unbekannten Qxx
        n = A.T * P * l                                   #Absolutgliedvektor n
    # 4) Zuschlagsvektor dx
    dx = N.LUsolve(n)

        dx = N.LUsolve(n)                                 #Zuschlagsvektor dx
    # 5) Residuenvektor v
    v = dl - A * dx

        v = l - A * dx                                    #Residuenvektor v
    # 6) Kofaktormatrix der Unbekannten Q_xx
    Q_xx = StochastischesModell.berechne_Q_xx(N)

        Q_vv = modell.berechne_Qvv(A, Q_ll, Q_xx)         #Kofaktormatrix der Verbesserungen Qvv
        R = modell.berechne_R(Q_vv, P)                    #Redundanzmatrix R
        r = modell.berechne_r(R)                          #Redundanzanteile als Vektor r
    # 7) Kofaktormatrix der Beobachtungen Q_ll_dach
    Q_ll_dach = A * Q_xx * A.T

        sigma_hat = modell.berechne_vks(v, P, r)          #Varianzkomponentenschätzung durchführen
    # 8) Kofaktormatrix der Verbesserungen Q_vv
    Q_vv = StochastischesModell.berechne_Qvv(A, P, Q_xx)

        ergebnisse_iter.append({                          #Zwischenergebnisse speichern in Liste
            "iter": it + 1,
            "Q_ll": Q_ll,
            "P": P,
            "N": N,
            "Q_xx": Q_xx,
            "dx": dx,
            "v": v,
            "Q_vv": Q_vv,
            "R": R,
            "r": r,
            "sigma_hat": sigma_hat,
            "sigma0_groups": dict(modell.sigma0_groups),
        })
    # 9) Redundanzmatrix R und Redundanzanteile r
    R = StochastischesModell.berechne_R(Q_vv, P)          #Redundanzmatrix R
    r = StochastischesModell.berechne_r(R)                #Redundanzanteile als Vektor r
    redundanzanteile = A.shape[0] - A.shape[1]            #n-u+d

        if all(abs(val - 1.0) < tol for val in sigma_hat.values()):   #Abbruchkriterium
            print(f"Konvergenz nach {it + 1} Iterationen erreicht.")
            break
    # 10) s0 a posteriori
    soaposteriori = Genauigkeitsmaße.s0apost(v, P, redundanzanteile)

        modell.update_sigma0_von_vks(sigma_hat)
    # 11) Ausgabe
    dict_ausgleichung = {
        "dx": dx,
        "v": v,
        "P": P,
        "N": N,
        "Q_xx": Q_xx,
        "Q_ll_dach": Q_ll_dach,
        "Q_vv": Q_vv,
        "R": R,
        "r": r,
        "soaposteriori": soaposteriori,
    }

    return {
    Export.Export.ausgleichung_to_datei(r"Zwischenergebnisse\Ausgleichung_Iteration0.csv", dict_ausgleichung)
    return dict_ausgleichung, dx


def ausgleichung_lokal(
    A: sp.Matrix,
    dl: sp.Matrix,
    Q_ll: sp.Matrix,
    x0: sp.Matrix,
    idx_X, idx_Y, idx_Z,
    aktive_unbekannte_indices,
    mit_massstab: bool = True,
):
    # 1) Gewichtsmatrix P
    P = StochastischesModell.berechne_P(Q_ll)

    # 2) Normalgleichungsmatrix N und Absolutgliedvektor n
    N = A.T * P * A
    n = A.T * P * dl

    # 3) Datumsfestlegung (Teilspurminimierung)
    G = Datumsfestlegung.raenderungsmatrix_G(x0, liste_punktnummern, mit_massstab=mit_massstab)
    aktive = Datumsfestlegung.datumskomponenten(auswahl, liste_punktnummern)
    E = Datumsfestlegung.auswahlmatrix_E(u=A.cols, aktive_unbekannte_indices=aktive)
    Gi = E * G

    # 4) Zuschlagsvektor dx
    dx = Datumsfestlegung.berechne_dx_geraendert(N, n, Gi)

    # 5) Residuenvektor v
    v = dl - A * dx

    # 6) Kofaktormatrix der Unbekannten Q_xx
    N_inv = N.inv()
    N_inv_G = N_inv * Gi
    S = Gi.T * N_inv_G
    S_inv = S.inv()
    Q_xx = N_inv - N_inv_G * S_inv * N_inv_G.T

    # 7) Kofaktormatrix der Beobachtungen Q_ll_dach
    Q_lhat_lhat = A * Q_xx * A.T

    # 8) Kofaktormatrix der Verbesserungen Q_vv
    Q_vv = P.inv() - Q_lhat_lhat

    # 9) Redundanzmatrix R, Redundanzanteile r, Redundanz
    R = Q_vv * P
    r_vec = sp.Matrix(R.diagonal())
    n_beob = A.rows
    u = A.cols
    d = Gi.shape[1]
    r_gesamt = n_beob - u + d

    # 10) s0 a posteriori
    sigma0_apost = Genauigkeitsmaße.s0apost(v, P, r_gesamt)

    # 11) Ausgabe
    dict_ausgleichung_lokal = {
        "dx": dx,
        "v": v,
        "Q_ll": Q_ll,
        "P": P,
        "N": N,
        "Q_xx": Q_xx,
        "Q_lhat_lhat": Q_lhat_lhat,
        "Q_vv": Q_vv,
        "R": R,
        "r": r,
        "sigma_hat": sigma_hat,
        "sigma0_groups": dict(modell.sigma0_groups),
        "history": ergebnisse_iter,
    }
        "r": r_vec,
        "r_gesamt": r_gesamt,
        "sigma0_apost": sigma0_apost,
        "G": G,
        "Gi": Gi,
    }

    Export.Export.ausgleichung_to_datei(r"Zwischenergebnisse\Ausgleichung_Iteration0_lokal.csv", dict_ausgleichung_lokal)
    return dict_ausgleichung_lokal, dx
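The reworked ausgleichung_global follows the usual Gauss-Markov sequence visible above: weight matrix P from Q_ll, normal equation matrix N = AᵀPA, absolute term n = AᵀP·dl, corrections dx = N⁻¹n and residuals v = dl − A·dx. A minimal self-contained sketch of that core sequence with assumed toy numbers (independent of the campus network data):

    import sympy as sp

    A = sp.Matrix([[1, 0], [0, 1], [1, 1]])   # design matrix (toy)
    dl = sp.Matrix([0.01, -0.02, 0.03])       # reduced observation vector (toy)
    P = sp.eye(3)                             # equal weights

    N = A.T * P * A                           # normal equation matrix
    n = A.T * P * dl                          # absolute term vector
    dx = N.LUsolve(n)                         # parameter corrections
    v = dl - A * dx                           # residuals

    print(dx, v)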
@@ -1,49 +1,186 @@
import sympy as sp
from dataclasses import dataclass, field
from typing import Dict, Tuple, Iterable
from Export import Export
from Datenbank import Datenbankzugriff


@dataclass
class StochastischesModell:
    sigma_beob: Iterable[float]                            #σ der einzelnen Beobachtung
    group_beob: Iterable[int]                              #Gruppenzugehörigkeit jeder Beobachtung (Distanz, Richtung, GNSS, Nivellement, ...)
    sigma0_groups: Dict[int, float] = field(default_factory=dict)   #σ0² für jede Gruppe
    n_beob: int
    sigma_beob: Iterable[float] = None                     #σ a priori der einzelnen Beobachtung
    gruppe_beob: Iterable[int] = None                      #Gruppenzugehörigkeit jeder Beobachtung (Distanz, Richtung, GNSS, Nivellement, ...)
    sigma0_gruppe: Dict[int, float] = field(default_factory=dict)   #σ0² für jede Gruppe

    def __post_init__(self):
        self.sigma_beob = sp.Matrix(list(self.sigma_beob))   #Spaltenvektor
        self.group_beob = sp.Matrix(list(self.group_beob))   #Spaltenvektor
        # Defaults setzen
        if self.sigma_beob is None:
            self.sigma_beob = [1.0] * int(self.n_beob)

        if self.sigma_beob.rows != self.group_beob.rows:
            raise ValueError("sigma_obs und group_ids müssen gleich viele Einträge haben.")
        if self.gruppe_beob is None:
            self.gruppe_beob = [1] * int(self.n_beob)

        unique_groups = sorted({int(g) for g in self.group_beob})   #jede Beobachtungsgruppe wird genau einmal berücksichtigt
        # In SymPy-Spaltenvektoren umwandeln
        self.sigma_beob = sp.Matrix(list(self.sigma_beob))
        self.gruppe_beob = sp.Matrix(list(self.gruppe_beob))

        # Dimension prüfen
        if self.sigma_beob.rows != self.gruppe_beob.rows:
            raise ValueError("sigma_beob und gruppe_beob müssen gleich viele Einträge haben.")

        if self.sigma_beob.rows != int(self.n_beob):
            raise ValueError("n_beob passt nicht zur Länge von sigma_beob / gruppe_beob.")

        # Fehlende Gruppen mit sigma0_sq = 1.0 ergänzen
        unique_groups = sorted({int(g) for g in self.gruppe_beob})
        for g in unique_groups:
            if g not in self.sigma0_groups:                #Fehlende Gruppen mit σ_0j^2 = 1.0
                self.sigma0_groups[g] = 1.0
            if g not in self.sigma0_gruppe:
                self.sigma0_gruppe[g] = 1.0

    @property
    def n_beob(self) -> int:
        return int(self.sigma_beob.rows)

    def berechne_Qll_P(self) -> Tuple[sp.Matrix, sp.Matrix]:
    def berechne_Qll(self) -> Tuple[sp.Matrix, sp.Matrix]:
        n = self.n_beob
        Q_ll = sp.zeros(n, n)
        P = sp.zeros(n, n)

        for i in range(self.n_beob):
            sigma_i = self.sigma_beob[i, 0]                #σ-Wert der i-ten Beobachtung holen
            g = int(self.group_beob[i, 0])                 #Gruppenzugehörigkeit der Beobachtung bestimmen
            sigma0_sq = self.sigma0_groups[g]              #Den Varianzfaktor der Gruppe holen
            g = int(self.gruppe_beob[i, 0])                #Gruppenzugehörigkeit der Beobachtung bestimmen
            sigma0_sq = self.sigma0_gruppe[g]              #Den Varianzfaktor der Gruppe holen
            q_ii = sigma_i**2                              #σ² berechnen
            Q_ll[i, i] = q_ii                              #Diagonale
            P[i, i] = 1 / (sigma0_sq * q_ii)               #durch VKS nicht mehr P=Qll^-1
        return Q_ll, P
        return Q_ll

    def Qll_symbolisch(self, pfad_datenbank, liste_beobachtungen_symbolisch):
        liste_standardabweichungen_symbole = []
        liste_beobachtungen_symbolisch = [str(b) for b in liste_beobachtungen_symbolisch]
        Qll = sp.zeros(len(liste_beobachtungen_symbolisch), len(liste_beobachtungen_symbolisch))

        db_zugriff = Datenbankzugriff(pfad_datenbank)
        dict_beobachtungenID_instrumenteID = db_zugriff.get_instrumenteID_beobachtungenID_dict()

        for i, beobachtung_symbolisch_i in enumerate(liste_beobachtungen_symbolisch):
            aufgeteilt_i = beobachtung_symbolisch_i.split("_")
            beobachtungenID_i = int(aufgeteilt_i[0])
            instrumenteID_i = dict_beobachtungenID_instrumenteID[beobachtungenID_i]
            beobachtungsart_i = str(aufgeteilt_i[1])

            if beobachtungsart_i == "SD":
                stabw_apriori_konstant = sp.Symbol(f"stabw_apriori_konstant_{beobachtungsart_i}_{instrumenteID_i}")
                stabw_apriori_streckenprop = sp.Symbol(f"stabw_apriori_streckenprop_{beobachtungsart_i}_{instrumenteID_i}")
                tachymeter_distanz = sp.Symbol(f"SD_{beobachtungenID_i}")

                sigma = sp.sqrt(stabw_apriori_konstant ** 2 + (stabw_apriori_streckenprop * tachymeter_distanz / 1000000) ** 2)
                liste_standardabweichungen_symbole.append(sigma)

                Qll[i, i] = sigma ** 2

            elif beobachtungsart_i == "R" or beobachtungsart_i == "ZW":
                stabw_apriori_konstant = sp.Symbol(f"stabw_apriori_konstant_{beobachtungsart_i}_{instrumenteID_i}")

                stabw_apriori_konstant_distanz = sp.Symbol(f"stabw_apriori_konstant_SD_{instrumenteID_i}")
                tachymeter_distanz = sp.Symbol(f"SD_{beobachtungenID_i}")

                sigma = sp.sqrt(stabw_apriori_konstant ** 2 + (stabw_apriori_konstant_distanz / tachymeter_distanz) ** 2)
                liste_standardabweichungen_symbole.append(sigma)

                Qll[i, i] = sigma ** 2

            for j in range(i + 1, len(liste_beobachtungen_symbolisch)):
                beobachtung_symbolisch_j = liste_beobachtungen_symbolisch[j]
                aufgeteilt_j = beobachtung_symbolisch_j.split("_")
                beobachtungsart_j = str(aufgeteilt_j[1])

                if beobachtungsart_i == "SD" and beobachtungsart_j == "SD":
                    Qll[i, j] = 0
                    Qll[j, i] = 0

        Export.matrix_to_csv(r"Zwischenergebnisse\Qll_Symbolisch.csv", liste_beobachtungen_symbolisch, liste_beobachtungen_symbolisch, Qll, "Qll")
        return Qll

    def Qll_numerisch(self, pfad_datenbank, Qll_Matrix_Symbolisch, liste_beobachtungen_symbolisch):
        db_zugriff = Datenbankzugriff(pfad_datenbank)
        dict_genauigkeiten = db_zugriff.get_genauigkeiten_dict()
        dict_beobachtungenID_instrumenteID = db_zugriff.get_instrumenteID_beobachtungenID_dict()

        liste_beobachtungen = db_zugriff.get_beobachtungen_from_beobachtungenid()
        dict_beobachtungenID_distanz = {}
        for standpunkt, zielpunkt, beobachtungenID, beobachtungsgruppeID, tachymeter_richtung, tachymeter_zenitwinkel, tachymeter_distanz in liste_beobachtungen:
            dict_beobachtungenID_distanz[int(beobachtungenID)] = tachymeter_distanz

        dict_genauigkeiten_neu = {}
        for genauigkeitenID, eintrag in dict_genauigkeiten.items():
            instrumenteID = int(eintrag[0])
            beobachtungsart = str(eintrag[1])
            stabw_apriori_konstant = eintrag[2]
            stabw_apriori_streckenprop = eintrag[3]
            dict_genauigkeiten_neu[(instrumenteID, beobachtungsart)] = (stabw_apriori_konstant,
                                                                        stabw_apriori_streckenprop)

        substitutionen = {}

        dict_konstante_sd = {}
        for (instrumenteID, beobachtungsart), (stabw_apriori_konstant,
                                               stabw_apriori_streckenprop) in dict_genauigkeiten_neu.items():
            if beobachtungsart == "Tachymeter_Strecke":
                if stabw_apriori_konstant is not None:
                    dict_konstante_sd[instrumenteID] = float(stabw_apriori_konstant)

        for (instrumenteID, beobachtungsart), (stabw_apriori_konstant,
                                               stabw_apriori_streckenprop) in dict_genauigkeiten_neu.items():

            if beobachtungsart == "Tachymeter_Strecke":
                beobachtungsart_kurz = "SD"
            elif beobachtungsart == "Tachymeter_Richtung":
                beobachtungsart_kurz = "R"
            elif beobachtungsart == "Tachymeter_Zenitwinkel":
                beobachtungsart_kurz = "ZW"


    def berechne_Qvv(self, A: sp.Matrix, Q_ll: sp.Matrix, Q_xx: sp.Matrix) -> sp.Matrix:
        Q_vv = Q_ll - A * Q_xx * A.T
            if stabw_apriori_konstant is not None:
                substitutionen[sp.Symbol(f"stabw_apriori_konstant_{beobachtungsart_kurz}_{instrumenteID}")] = float(stabw_apriori_konstant)
            if stabw_apriori_streckenprop is not None:
                substitutionen[sp.Symbol(f"stabw_apriori_streckenprop_{beobachtungsart_kurz}_{instrumenteID}")] = float(stabw_apriori_streckenprop)

        for instrumenteID, wert in dict_konstante_sd.items():
            substitutionen[sp.Symbol(f"stabw_apriori_konstant_SD_{instrumenteID}")] = float(wert)

        liste_beobachtungen_symbolisch = [str(b) for b in liste_beobachtungen_symbolisch]

        for beobachtung_symbolisch in liste_beobachtungen_symbolisch:
            aufgeteilt = beobachtung_symbolisch.split("_")
            beobachtungenID = int(aufgeteilt[0])

            distanz = dict_beobachtungenID_distanz.get(beobachtungenID, None)
            if distanz is not None:
                substitutionen[sp.Symbol(f"SD_{beobachtungenID}")] = float(distanz)

        Qll_numerisch = Qll_Matrix_Symbolisch.xreplace(substitutionen)

        Export.matrix_to_csv(
            r"Zwischenergebnisse\Qll_Numerisch.csv",
            liste_beobachtungen_symbolisch,
            liste_beobachtungen_symbolisch,
            Qll_numerisch,
            "Qll"
        )

        return Qll_numerisch

    def berechne_P(Q_ll: sp.Matrix) -> sp.Matrix:
        P = Q_ll.inv()
        return P

    def berechne_Q_xx(N: sp.Matrix) -> sp.Matrix:
        if N.rows != N.cols:
            raise ValueError("N muss eine quadratische Matrix sein")
        Q_xx = N.inv()
        return Q_xx

    def berechne_Qvv(self, A: sp.Matrix, P: sp.Matrix, Q_xx: sp.Matrix) -> sp.Matrix:
        Q_vv = P.inv() - A * Q_xx * A.T
        return Q_vv                                        #Kofaktormatrix der Beobachtungsresiduen

@@ -57,30 +194,4 @@ class StochastischesModell:
        r = sp.zeros(n, 1)
        for i in range(n):
            r[i, 0] = R[i, i]
        return r                                           #Redundanzanteile

    def berechne_vks(self, v: sp.Matrix, P: sp.Matrix, r: sp.Matrix) -> Dict[int, float]:
        if v.rows != self.n_beob:
            raise ValueError("v passt nicht zur Anzahl der Beobachtungen.")
        gruppen = sorted({int(g) for g in self.group_beob})
        sigma_gruppen: Dict[int, float] = {}
        for g in gruppen:
            idx = [i for i in range(self.n_beob)
                   if int(self.group_beob[i, 0]) == g]
            if not idx:
                continue

            v_g = sp.Matrix([v[i, 0] for i in idx])
            P_g = sp.zeros(len(idx), len(idx))
            for k, i_beob in enumerate(idx):
                P_g[k, k] = P[i_beob, i_beob]
            r_g = sum(r[i_beob, 0] for i_beob in idx)
            sigma_gruppe_g = (v_g.T * P_g * v_g)[0, 0] / r_g
            sigma_gruppen[g] = float(sigma_gruppe_g)
        return sigma_gruppen

    def update_sigma0_von_vks(self, sigma_hat: Dict[int, float]) -> None:
        for g, val in sigma_hat.items():
            self.sigma0_groups[int(g)] = float(val)
        return r                                           #Redundanzanteile
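Qll_symbolisch builds the a-priori standard deviation of a total-station distance from a constant part plus a distance-proportional (ppm) part, and Qll_numerisch later substitutes the database values via xreplace. A minimal sketch of that model with assumed symbol names and toy values (units are an assumption: constant part in metres, proportional part in ppm, distance in metres):

    import sympy as sp

    a, b, d = sp.symbols("a b d", positive=True)
    sigma_SD = sp.sqrt(a**2 + (b * d / 1000000)**2)   # sigma = sqrt(a^2 + (b[ppm]*d)^2)

    # numeric evaluation via substitution, mirroring the xreplace step:
    print(sigma_SD.xreplace({a: 0.001, b: 1.5, d: 250.0}))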
56 Tests_Michelle/Parameterschaetzung_müll.py Normal file
@@ -0,0 +1,56 @@
from typing import Dict, Any
import sympy as sp
from Stochastisches_Modell import StochastischesModell


def iterative_ausgleichung(
    A: sp.Matrix,
    dl: sp.Matrix,
    modell: StochastischesModell,
    max_iter: int = 100,                               #fehlender Parameter, ergänzt analog zur ursprünglichen Parameterschätzung
) -> Dict[str, Any]:

    ergebnisse_iter = []                               #Liste für Zwischenergebnisse

    for it in range(max_iter):
        Q_ll, P = modell.berechne_Qll_P()              #Stochastisches Modell: Qll und P berechnen

        N = A.T * P * A                                #Normalgleichungsmatrix N
        Q_xx = N.inv()                                 #Kofaktormatrix der Unbekannten Qxx
        n = A.T * P * dl                               #Absolutgliedvektor n

        dx = N.LUsolve(n)                              #Zuschlagsvektor dx

        v = dl - A * dx                                #Residuenvektor v

        Q_vv = modell.berechne_Qvv(A, P, Q_xx)         #Kofaktormatrix der Verbesserungen Qvv
        R = modell.berechne_R(Q_vv, P)                 #Redundanzmatrix R
        r = modell.berechne_r(R)                       #Redundanzanteile als Vektor r

        sigma_hat = modell.berechne_vks(v, P, r)       #Varianzkomponentenschätzung, ergänzt (wird unten benötigt)

        ergebnisse_iter.append({                       #Zwischenergebnisse speichern in Liste
            "iter": it + 1,
            "Q_ll": Q_ll,
            "P": P,
            "N": N,
            "Q_xx": Q_xx,
            "dx": dx,
            "v": v,
            "Q_vv": Q_vv,
            "R": R,
            "r": r,
            "sigma_hat": sigma_hat,
            "sigma0_groups": dict(modell.sigma0_groups),
        })

    return {
        "dx": dx,
        "v": v,
        "Q_ll": Q_ll,
        "P": P,
        "N": N,
        "Q_xx": Q_xx,
        "Q_vv": Q_vv,
        "R": R,
        "r": r,
        "sigma_hat": sigma_hat,
        "sigma0_groups": dict(modell.sigma0_groups),
        "history": ergebnisse_iter,
    }
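In the original Parameterschätzung this loop is terminated once every estimated variance component is close to 1, i.e. the a-priori stochastic model agrees with the residuals. A minimal sketch of that convergence check with assumed values:

    sigma_hat = {1: 1.0004, 2: 0.9997}   # variance components per observation group (toy values)
    tol = 1e-3
    konvergiert = all(abs(val - 1.0) < tol for val in sigma_hat.values())
    print(konvergiert)   # True -> iteration would stop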
@@ -1,7 +1,7 @@
# Transformation ITRF2020 --> ETRF89/DREF91 Realisierung 2025

import sympy as sp
from Einheitenumrechnung import Einheitenumrechnung
from Berechnungen import Einheitenumrechnung

# Helmert-Parameter zur Referenzepoche t0
t0 = 2015.0
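Helmert parameters of such a frame transformation are published for a reference epoch t0 and have to be propagated to the observation epoch before use. A minimal, generic sketch of that propagation (symbol names and numbers are illustrative assumptions, not the project's parameter set):

    import sympy as sp

    p0, p_dot, t, t0_ref = sp.symbols("p0 p_dot t t0_ref")
    p_t = p0 + p_dot * (t - t0_ref)   # parameter value at epoch t from value and rate at t0
    print(p_t.subs({p0: 0.0026, p_dot: 0.0001, t: 2025.5, t0_ref: 2015.0}))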
5798 Zwischenergebnisse/Ausgleichung_Iteration0.csv Normal file
File diff suppressed because one or more lines are too long
2629 Zwischenergebnisse/Beobachtungsvektor_Numerisch.csv Normal file
File diff suppressed because it is too large
2629 Zwischenergebnisse/Beobachtungsvektor_Näherung_Symbolisch.csv Normal file
File diff suppressed because it is too large
2629 Zwischenergebnisse/Jacobi_Matrix_Numerisch_Iteration0.csv Normal file
File diff suppressed because it is too large
2629 Zwischenergebnisse/Jacobi_Matrix_Symbolisch.csv Normal file
File diff suppressed because it is too large
2629 Zwischenergebnisse/Qll_Numerisch.csv Normal file
File diff suppressed because one or more lines are too long
2102 Zwischenergebnisse/Qll_Symbolisch.csv Normal file
File diff suppressed because one or more lines are too long
178 Zwischenergebnisse/Unbekanntenvektor_Numerisch_Iteration0.csv Normal file
@@ -0,0 +1,178 @@
Unbekanntenvektor;
X10009;3794737,8261094868
Y10009;546742,68877082553
Z10009;5080128,3507065025
X10006;3794737,3173918251
Y10006;546711,81881005680
Z10006;5080139,1948981553
X10010;3794749,9062764374
Y10010;546779,89873060947
Z10010;5080150,0065082129
X10018;3794746,3410212440
Y10018;546805,36274589579
Z10018;5080131,2606735807
X10008;3794766,3320322788
Y10008;546753,35065436338
Z10008;5080126,6029089628
X10005;3794766,1208143157
Y10005;546724,06052930078
Z10005;5080116,5199339003
X10003;3794815,3173410210
Y10003;546732,77828696876
Z10003;5080080,1724589822
X10004;3794777,6376987905
Y10004;546715,83150049957
Z10004;5080113,1123126568
X10007;3794837,5286159980
Y10007;546770,79318297003
Z10007;5080113,7809357923
X10001;3794877,4442163322
Y10001;546736,00280125012
Z10001;5080033,9233738090
X10002;3794847,2705692228
Y10002;546727,23776782755
Z10002;5080068,6550395742
X10016;3794821,5493086762
Y10016;546795,35855828324
Z10016;5080096,3781247181
X10011;3794865,2733347461
Y10011;546819,04011517813
Z10011;5080007,2618007053
X10026;3794745,3602895556
Y10026;546838,50900056992
Z10026;5080137,0151585320
X10027;3794739,3149203953
Y10027;546879,33793638616
Z10027;5080111,1458846316
X10043;3794734,8351122441
Y10043;546926,68930651186
Z10043;5080112,5813005353
X10044;3794729,0623322332
Y10044;546959,86250598939
Z10044;5080088,3333714346
X10021;3794756,6131819634
Y10021;546837,75365389517
Z10021;5080107,5121020472
X10020;3794765,2080349228
Y10020;546838,53030298970
Z10020;5080104,5111676977
X10024;3794750,8358620867
Y10024;546859,63394459803
Z10024;5080100,5394282307
X10025;3794751,7663267483
Y10025;546873,57347223762
Z10025;5080095,0451717615
X10022;3794759,1895938955
Y10022;546845,28401077731
Z10022;5080103,5899291664
X10023;3794761,9526928161
Y10023;546851,33336517339
Z10023;5080100,5059386708
X10019;3794800,8740875486
Y10019;546840,94087227260
Z10019;5080110,3273932595
X10033;3794796,8531006654
Y10033;546879,07184937566
Z10033;5080094,0204517832
X10017;3794800,7678644184
Y10017;546828,68122233710
Z10017;5080065,2825882381
X10052;3794730,1960150051
Y10052;546990,10831652910
Z10052;5080096,2656965254
X10042;3794728,5162126020
Y10042;546934,34006903819
Z10042;5080079,7273739038
X10053;3794719,6916959078
Y10053;547015,48054781071
Z10053;5080068,7703563867
X10037;3794772,9778814467
Y10037;546952,92013359477
Z10037;5080046,8109239414
X10040;3794739,6030496521
Y10040;546947,75984157427
Z10040;5080048,1969198893
X10041;3794730,1824448139
Y10041;546914,19811434475
Z10041;5080049,3500350653
X10038;3794782,7017423193
Y10038;546922,48386199018
Z10038;5080051,8118791558
X10051;3794745,1460916233
Y10051;546988,18094585355
Z10051;5080068,9972397214
X10036;3794786,0750212390
Y10036;546959,93132156577
Z10036;5080032,2974019706
X10035;3794812,6082424349
Y10035;546947,26188674066
Z10035;5080006,7163449917
X10039;3794777,7914583969
Y10039;546907,18289460418
Z10039;5080055,0958695237
X10059;3794705,8658490042
Y10059;547076,03198452389
Z10059;5080058,6055812442
X10050;3794734,7983456069
Y10050;547006,91683443581
Z10050;5080052,2874619229
X10013;3794826,9382148853
Y10013;546821,50874597263
Z10013;5080049,8667718192
X10028;3794859,0233499698
Y10028;546892,27846656784
Z10028;5079987,7607363630
X10012;3794821,3323145389
Y10012;546795,84225596000
Z10012;5080043,3879320889
X10014;3794813,7871761550
Y10014;546807,87279685993
Z10014;5080059,9679990845
X10031;3794800,9671774456
Y10031;546875,13272652383
Z10031;5080058,5378277475
X10015;3794825,3025951464
Y10015;546794,34543982906
Z10015;5080076,0668343443
X10032;3794744,6965562960
Y10032;546866,50282400186
Z10032;5080024,1065637068
X10030;3794813,4354968976
Y10030;546887,79185455695
Z10030;5080021,1584832004
X10029;3794812,4058714940
Y10029;546901,17878684785
Z10029;5080017,3976387208
X10034;3794851,7492064292
Y10034;546947,45549867210
Z10034;5079974,2108252074
X10045;3794857,0124008224
Y10045;547004,59141104372
Z10045;5079971,9726825530
X10049;3794751,2572443298
Y10049;547011,56194730279
Z10049;5080031,2696144487
X10047;3794794,0888480879
Y10047;547003,25526369818
Z10047;5079996,4270136360
X10046;3794809,3628738735
Y10046;546996,66390205411
Z10046;5079986,7426119278
X10048;3794772,8513984394
Y10048;547004,79019417693
Z10048;5080014,6234130393
X10057;3794763,2886457477
Y10057;547065,11840483252
Z10057;5080003,6886467107
X10055;3794799,7076102519
Y10055;547064,35354224121
Z10055;5079974,0101138130
X10054;3794845,9912677171
Y10054;547061,68456488943
Z10054;5079930,5760331770
X10058;3794736,1075504871
Y10058;547085,20337710682
Z10058;5080034,3588629889
X10056;3794790,3311639278
Y10056;547079,62319395720
Z10056;5079984,3166781181
178 Zwischenergebnisse/Unbekanntenvektor_Symbolisch.csv Normal file
@@ -0,0 +1,178 @@
Unbekanntenvektor;
X10009;X10009
Y10009;Y10009
Z10009;Z10009
X10006;X10006
Y10006;Y10006
Z10006;Z10006
X10010;X10010
Y10010;Y10010
Z10010;Z10010
X10018;X10018
Y10018;Y10018
Z10018;Z10018
X10008;X10008
Y10008;Y10008
Z10008;Z10008
X10005;X10005
Y10005;Y10005
Z10005;Z10005
X10003;X10003
Y10003;Y10003
Z10003;Z10003
X10004;X10004
Y10004;Y10004
Z10004;Z10004
X10007;X10007
Y10007;Y10007
Z10007;Z10007
X10001;X10001
Y10001;Y10001
Z10001;Z10001
X10002;X10002
Y10002;Y10002
Z10002;Z10002
X10016;X10016
Y10016;Y10016
Z10016;Z10016
X10011;X10011
Y10011;Y10011
Z10011;Z10011
X10026;X10026
Y10026;Y10026
Z10026;Z10026
X10027;X10027
Y10027;Y10027
Z10027;Z10027
X10043;X10043
Y10043;Y10043
Z10043;Z10043
X10044;X10044
Y10044;Y10044
Z10044;Z10044
X10021;X10021
Y10021;Y10021
Z10021;Z10021
X10020;X10020
Y10020;Y10020
Z10020;Z10020
X10024;X10024
Y10024;Y10024
Z10024;Z10024
X10025;X10025
Y10025;Y10025
Z10025;Z10025
X10022;X10022
Y10022;Y10022
Z10022;Z10022
X10023;X10023
Y10023;Y10023
Z10023;Z10023
X10019;X10019
Y10019;Y10019
Z10019;Z10019
X10033;X10033
Y10033;Y10033
Z10033;Z10033
X10017;X10017
Y10017;Y10017
Z10017;Z10017
X10052;X10052
Y10052;Y10052
Z10052;Z10052
X10042;X10042
Y10042;Y10042
Z10042;Z10042
X10053;X10053
Y10053;Y10053
Z10053;Z10053
X10037;X10037
Y10037;Y10037
Z10037;Z10037
X10040;X10040
Y10040;Y10040
Z10040;Z10040
X10041;X10041
Y10041;Y10041
Z10041;Z10041
X10038;X10038
Y10038;Y10038
Z10038;Z10038
X10051;X10051
Y10051;Y10051
Z10051;Z10051
X10036;X10036
Y10036;Y10036
Z10036;Z10036
X10035;X10035
Y10035;Y10035
Z10035;Z10035
X10039;X10039
Y10039;Y10039
Z10039;Z10039
X10059;X10059
Y10059;Y10059
Z10059;Z10059
X10050;X10050
Y10050;Y10050
Z10050;Z10050
X10013;X10013
Y10013;Y10013
Z10013;Z10013
X10028;X10028
Y10028;Y10028
Z10028;Z10028
X10012;X10012
Y10012;Y10012
Z10012;Z10012
X10014;X10014
Y10014;Y10014
Z10014;Z10014
X10031;X10031
Y10031;Y10031
Z10031;Z10031
X10015;X10015
Y10015;Y10015
Z10015;Z10015
X10032;X10032
Y10032;Y10032
Z10032;Z10032
X10030;X10030
Y10030;Y10030
Z10030;Z10030
X10029;X10029
Y10029;Y10029
Z10029;Z10029
X10034;X10034
Y10034;Y10034
Z10034;Z10034
X10045;X10045
Y10045;Y10045
Z10045;Z10045
X10049;X10049
Y10049;Y10049
Z10049;Z10049
X10047;X10047
Y10047;Y10047
Z10047;Z10047
X10046;X10046
Y10046;Y10046
Z10046;Z10046
X10048;X10048
Y10048;Y10048
Z10048;Z10048
X10057;X10057
Y10057;Y10057
Z10057;Z10057
X10055;X10055
Y10055;Y10055
Z10055;Z10055
X10054;X10054
Y10054;Y10054
Z10054;Z10054
X10058;X10058
Y10058;Y10058
Z10058;Z10058
X10056;X10056
Y10056;Y10056
Z10056;Z10056