2025-01-03 15:50:56 +01:00
|
|
|
package maintenance
|
2025-01-02 21:42:20 +01:00
|
|
|
|
|
|
|
|
import (
|
2025-08-11 07:46:53 +02:00
|
|
|
"bamort/config"
|
2025-01-02 21:42:20 +01:00
|
|
|
"bamort/database"
|
2025-08-11 07:46:53 +02:00
|
|
|
"bamort/logger"
|
2025-07-27 23:13:04 +02:00
|
|
|
"bamort/models"
|
2025-01-02 21:42:20 +01:00
|
|
|
"bamort/user"
|
2025-07-24 07:39:43 +02:00
|
|
|
"fmt"
|
2025-01-02 21:42:20 +01:00
|
|
|
"net/http"
|
2025-07-24 07:39:43 +02:00
|
|
|
"os"
|
|
|
|
|
"path/filepath"
|
2026-01-02 12:07:28 +01:00
|
|
|
"reflect"
|
2025-07-29 08:38:29 +02:00
|
|
|
"strings"
|
2025-01-02 21:42:20 +01:00
|
|
|
|
|
|
|
|
"github.com/gin-gonic/gin"
|
2025-07-24 07:39:43 +02:00
|
|
|
"gorm.io/driver/sqlite"
|
|
|
|
|
"gorm.io/gorm"
|
2025-07-29 09:42:53 +02:00
|
|
|
"gorm.io/gorm/clause"
|
2025-01-02 21:42:20 +01:00
|
|
|
)
|
|
|
|
|
|
2025-07-24 07:39:43 +02:00
|
|
|
// Shared locations for test data management, aliased from the database
// package so this package can refer to them with short local names.
var (
	// Directory that holds generated test data files.
	testDataDir = database.TestDataDir
	// Path of the prepared SQLite test database file.
	preparedTestDB = database.PreparedTestDB
)
|
|
|
|
|
|
|
|
|
|
// respondWithError writes a JSON error response of the form
// {"error": message} with the given HTTP status code.
func respondWithError(c *gin.Context, status int, message string) {
	c.JSON(status, gin.H{"error": message})
}
|
|
|
|
|
|
|
|
|
|
// migrateAllStructures migrates all database structures to the provided database
|
|
|
|
|
func migrateAllStructures(db *gorm.DB) error {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Starte Migration aller Datenbankstrukturen...")
|
|
|
|
|
|
2025-07-24 07:39:43 +02:00
|
|
|
// Migrate all structures in the correct order
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Migriere Datenbankstrukturen...")
|
2025-07-24 07:39:43 +02:00
|
|
|
if err := database.MigrateStructure(db); err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Migrieren der Datenbankstrukturen: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to migrate database structures: %w", err)
|
|
|
|
|
}
|
2025-08-11 07:46:53 +02:00
|
|
|
|
|
|
|
|
logger.Debug("Migriere Benutzerstrukturen...")
|
2025-07-24 07:39:43 +02:00
|
|
|
if err := user.MigrateStructure(db); err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Migrieren der Benutzerstrukturen: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to migrate user structures: %w", err)
|
|
|
|
|
}
|
2025-08-11 07:46:53 +02:00
|
|
|
|
|
|
|
|
logger.Debug("Migriere GSMaster-Strukturen...")
|
2025-07-28 20:55:07 +02:00
|
|
|
if err := models.MigrateStructure(db); err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Migrieren der GSMaster-Strukturen: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to migrate gsmaster structures: %w", err)
|
|
|
|
|
}
|
2025-07-29 07:59:40 +02:00
|
|
|
|
2025-07-29 08:38:29 +02:00
|
|
|
/*if err := importer.MigrateStructure(db); err != nil {
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to migrate importer structures: %w", err)
|
2025-07-29 08:38:29 +02:00
|
|
|
}*/
|
2025-08-11 07:46:53 +02:00
|
|
|
|
|
|
|
|
logger.Info("Migration aller Datenbankstrukturen erfolgreich abgeschlossen")
|
2025-07-24 07:39:43 +02:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-07 09:41:51 +02:00
|
|
|
func migrateDataIfNeeded(db *gorm.DB) error {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Starte Datenmigration falls erforderlich...")
|
|
|
|
|
|
2026-01-25 21:34:25 +01:00
|
|
|
err := database.MigrateDataIfNeeded(db)
|
|
|
|
|
if err != nil {
|
|
|
|
|
logger.Error("Fehler beim Migrieren der Datenbankdaten: %s", err.Error())
|
|
|
|
|
return fmt.Errorf("failed to migrate database data: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-07 09:41:51 +02:00
|
|
|
// Kopiere categorie nach learning_category für Spells, wenn learning_category leer ist
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Migriere Spell Learning Categories...")
|
2026-01-25 21:34:25 +01:00
|
|
|
err = migrateSpellLearningCategories(db)
|
2025-08-07 09:41:51 +02:00
|
|
|
if err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Migrieren der Spell Learning Categories: %s", err.Error())
|
2025-08-07 09:41:51 +02:00
|
|
|
return fmt.Errorf("failed to migrate spell learning categories: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Datenmigration erfolgreich abgeschlossen")
|
2025-08-07 09:41:51 +02:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// migrateSpellLearningCategories copies category values into
// learning_category for all spell rows where learning_category is
// empty or NULL. Returns nil when nothing needed updating.
func migrateSpellLearningCategories(db *gorm.DB) error {
	logger.Debug("Starte Migration der Spell Learning Categories...")

	// SQL statement that copies category into learning_category where
	// learning_category is empty or NULL and category carries a value.
	sql := `
		UPDATE gsm_spells
		SET learning_category = category
		WHERE (learning_category IS NULL OR learning_category = '')
		AND category IS NOT NULL
		AND category != ''
	`

	logger.Debug("Führe SQL-Update aus: %s", strings.ReplaceAll(sql, "\n", " "))
	result := db.Exec(sql)
	if result.Error != nil {
		logger.Error("Fehler beim SQL-Update der Spell Learning Categories: %s", result.Error.Error())
		return fmt.Errorf("failed to update spell learning categories: %w", result.Error)
	}

	// Report how many rows were updated.
	if result.RowsAffected > 0 {
		logger.Info("Updated %d spell records with learning_category from categorie", result.RowsAffected)
		fmt.Printf("Updated %d spell records with learning_category from categorie\n", result.RowsAffected)
	} else {
		logger.Debug("Keine Spell-Datensätze benötigten ein Update der learning_category")
	}

	return nil
}
|
|
|
|
|
|
2025-07-24 07:39:43 +02:00
|
|
|
func MakeTestdataFromLive(c *gin.Context) {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Starte Testdaten-Erstellung aus Live-Datenbank...")
|
|
|
|
|
|
2025-07-29 07:59:40 +02:00
|
|
|
liveDB := database.ConnectDatabase()
|
|
|
|
|
if liveDB == nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Verbinden mit der Live-Datenbank")
|
2025-07-29 07:59:40 +02:00
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to connect to live database")
|
2025-01-18 20:59:54 +01:00
|
|
|
return
|
2025-01-02 21:42:20 +01:00
|
|
|
}
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Erfolgreich mit Live-Datenbank verbunden")
|
2025-07-24 07:39:43 +02:00
|
|
|
|
2025-07-29 07:59:40 +02:00
|
|
|
// Live-Datenbank in SQLite-Datei kopieren
|
|
|
|
|
backupFile := preparedTestDB
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Kopiere Live-Datenbank nach: %s", backupFile)
|
2025-07-29 07:59:40 +02:00
|
|
|
err := copyLiveDatabaseToFile(liveDB, backupFile)
|
2025-01-02 21:42:20 +01:00
|
|
|
if err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Kopieren der Datenbank: %s", err.Error())
|
2025-07-29 07:59:40 +02:00
|
|
|
respondWithError(c, http.StatusInternalServerError, fmt.Sprintf("Failed to copy database: %v", err))
|
2025-01-18 20:59:54 +01:00
|
|
|
return
|
2025-01-02 21:42:20 +01:00
|
|
|
}
|
2025-07-24 07:39:43 +02:00
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Live-Datenbank erfolgreich in Datei kopiert: %s", backupFile)
|
2025-07-24 07:39:43 +02:00
|
|
|
c.JSON(http.StatusOK, gin.H{
|
2025-07-29 07:59:40 +02:00
|
|
|
"message": "Live database copied to file successfully",
|
|
|
|
|
"test_data_file": backupFile,
|
2025-07-24 07:39:43 +02:00
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-29 08:38:29 +02:00
|
|
|
// CopyLiveDatabaseToFile copies the MariaDB database into a SQLite file.
// It is an exported wrapper around copyLiveDatabaseToFile for testing.
func CopyLiveDatabaseToFile(liveDB *gorm.DB, targetFile string) error {
	return copyLiveDatabaseToFile(liveDB, targetFile)
}
|
|
|
|
|
|
2025-07-29 07:59:40 +02:00
|
|
|
// copyLiveDatabaseToFile copies the MariaDB database into a SQLite file.
// It creates the target directory, moves an existing target file aside as
// "<targetFile>.backup", creates a fresh SQLite database, migrates all
// structures into it and then copies the data table by table.
func copyLiveDatabaseToFile(liveDB *gorm.DB, targetFile string) error {
	logger.Debug("Starte Kopiervorgang von Live-DB nach SQLite-Datei: %s", targetFile)

	// Create the target directory if it does not exist yet.
	dir := filepath.Dir(targetFile)
	logger.Debug("Erstelle Zielverzeichnis falls erforderlich: %s", dir)
	if err := os.MkdirAll(dir, 0755); err != nil {
		logger.Error("Fehler beim Erstellen des Verzeichnisses %s: %s", dir, err.Error())
		return fmt.Errorf("failed to create directory: %w", err)
	}

	// Move an already existing target file aside as a backup.
	if _, err := os.Stat(targetFile); err == nil {
		backupFile := targetFile + ".backup"
		logger.Debug("Existierende Datei gefunden, erstelle Backup: %s", backupFile)
		os.Remove(backupFile) // remove a stale backup first; error intentionally ignored
		if err := os.Rename(targetFile, backupFile); err != nil {
			logger.Error("Fehler beim Erstellen des Backups %s: %s", backupFile, err.Error())
			return fmt.Errorf("failed to backup existing file: %w", err)
		}
		logger.Debug("Backup erfolgreich erstellt")
	}

	// Create the SQLite target database.
	logger.Debug("Erstelle neue SQLite-Zieldatenbank: %s", targetFile)
	targetDB, err := gorm.Open(sqlite.Open(targetFile), &gorm.Config{})
	if err != nil {
		logger.Error("Fehler beim Erstellen der SQLite-Zieldatenbank: %s", err.Error())
		return fmt.Errorf("failed to create target SQLite database: %w", err)
	}
	defer func() {
		// Close the underlying sql.DB handle when the copy is done.
		if sqlDB, err := targetDB.DB(); err == nil {
			logger.Debug("Schließe SQLite-Datenbankverbindung")
			sqlDB.Close()
		}
	}()

	// Migrate all structures into the SQLite database.
	logger.Debug("Migriere Strukturen in SQLite-Datenbank...")
	if err := migrateAllStructures(targetDB); err != nil {
		logger.Error("Fehler beim Migrieren der Strukturen in SQLite: %s", err.Error())
		return fmt.Errorf("failed to migrate structures to SQLite: %w", err)
	}

	// Copy the data from MariaDB to SQLite.
	logger.Info("Kopiere Daten von MariaDB zu SQLite...")
	if err := copyMariaDBToSQLite(liveDB, targetDB); err != nil {
		logger.Error("Fehler beim Kopieren der Daten von MariaDB zu SQLite: %s", err.Error())
		return fmt.Errorf("failed to copy data from MariaDB to SQLite: %w", err)
	}

	logger.Info("Kopiervorgang erfolgreich abgeschlossen")
	return nil
}
|
2025-07-24 07:39:43 +02:00
|
|
|
|
2025-07-29 07:59:40 +02:00
|
|
|
// copyMariaDBToSQLite copies all data from MariaDB to SQLite, table by
// table, in dependency order (base tables before tables that reference
// them). The first failing table aborts the copy.
func copyMariaDBToSQLite(mariaDB, sqliteDB *gorm.DB) error {
	logger.Debug("Starte Kopiervorgang aller Daten von MariaDB zu SQLite...")

	// Complete list of all structures with GORM tags, in the right order
	// (base tables first because of foreign-key dependencies).
	tables := []interface{}{
		// Base structures (no dependencies)
		&user.User{},

		// Learning costs system - base tables
		&models.Source{},
		&models.CharacterClass{},
		&models.SkillCategory{},
		&models.SkillDifficulty{},
		&models.SpellSchool{},
		&models.MiscLookup{},

		// Learning costs system - dependent tables
		&models.ClassCategoryEPCost{},
		&models.ClassSpellSchoolEPCost{},
		&models.SpellLevelLECost{},
		&models.SkillCategoryDifficulty{},
		&models.WeaponSkillCategoryDifficulty{},
		&models.SkillImprovementCost{},
		&models.ClassCategoryLearningPoints{},
		&models.ClassSpellPoints{},
		&models.ClassTypicalSkill{},
		&models.ClassTypicalSpell{},

		// GSMaster base data
		//&models.LookupList{}, // base for skills, spells, equipment
		&models.Skill{},
		&models.WeaponSkill{},
		&models.Spell{},
		&models.Equipment{},
		&models.Weapon{},
		&models.Container{},
		&models.Transportation{},
		&models.Believe{},

		// Characters (base)
		&models.Char{},

		// Character attributes (dependent on Char)
		&models.Eigenschaft{},
		&models.Lp{},
		&models.Ap{},
		&models.B{},
		&models.Merkmale{},
		&models.Erfahrungsschatz{},
		&models.Bennies{},
		&models.Vermoegen{},

		// Character skills (dependent on Char and skills)
		&models.SkFertigkeit{},
		&models.SkWaffenfertigkeit{},
		&models.SkAngeboreneFertigkeit{},
		&models.SkZauber{},

		// Character equipment (dependent on Char and equipment)
		&models.EqAusruestung{},
		&models.EqWaffe{},
		&models.EqContainer{},

		// Character creation sessions (dependent on Char)
		&models.CharacterCreationSession{},

		// Audit logging (dependent on Char)
		&models.AuditLogEntry{},

		// View structures without their own tables are not copied:
		// SkillLearningInfo, SpellLearningInfo, CharList, FeChar, etc.
	}

	logger.Info("Kopiere Daten für %d Tabellen...", len(tables))
	for i, model := range tables {
		logger.Debug("Kopiere Tabelle %d/%d: %T", i+1, len(tables), model)
		if err := copyTableData(mariaDB, sqliteDB, model); err != nil {
			logger.Error("Fehler beim Kopieren der Tabellendaten für %T: %s", model, err.Error())
			return fmt.Errorf("failed to copy table data for %T: %w", model, err)
		}
	}

	logger.Info("Alle Tabellendaten erfolgreich kopiert")
	return nil
}
|
|
|
|
|
|
2025-07-29 07:59:40 +02:00
|
|
|
// copyTableData copies every row of one table from MariaDB to SQLite.
// model must be a pointer to the GORM model struct of the table. Rows are
// read in batches of 100 via reflection-built slices and written one by
// one with Save. A table missing in the source is skipped silently.
func copyTableData(sourceDB, targetDB *gorm.DB, model interface{}) error {
	tableName := fmt.Sprintf("%T", model)
	logger.Debug("Starte Kopiervorgang für Tabelle: %s", tableName)

	// Determine the number of rows in the source table.
	var count int64
	err := sourceDB.Model(model).Count(&count).Error
	if err != nil {
		// If table doesn't exist, skip silently (useful for testing with partial schemas)
		if isTableNotExistError(err) {
			logger.Debug("Tabelle %s existiert nicht in der Quelle, überspringe", tableName)
			return nil
		}
		logger.Error("Fehler beim Zählen der Datensätze für %s: %s", tableName, err.Error())
		return err
	}

	if count == 0 {
		logger.Debug("Tabelle %s ist leer, keine Daten zu kopieren", tableName)
		return nil // nothing to copy
	}

	logger.Debug("Kopiere %d Datensätze für Tabelle %s", count, tableName)

	// Copy the data in batches (for large tables).
	batchSize := 100
	totalBatches := (int(count) + batchSize - 1) / batchSize

	// Get the element type for creating slices of records.
	// model is *T, so Elem() yields the struct type T.
	modelType := reflect.TypeOf(model).Elem()

	for offset := 0; offset < int(count); offset += batchSize {
		batchNum := (offset / batchSize) + 1
		logger.Debug("Kopiere Batch %d/%d für %s (Offset: %d, Limit: %d)", batchNum, totalBatches, tableName, offset, batchSize)

		// Build a *[]T via reflection so GORM scans into properly
		// typed records rather than maps.
		sliceType := reflect.SliceOf(modelType)
		recordsValue := reflect.MakeSlice(sliceType, 0, batchSize)
		recordsPtr := reflect.New(sliceType)
		recordsPtr.Elem().Set(recordsValue)

		// Read one batch from MariaDB (use proper struct type instead of map).
		if err := sourceDB.Model(model).Offset(offset).Limit(batchSize).Find(recordsPtr.Interface()).Error; err != nil {
			logger.Error("Fehler beim Lesen von Batch %d für %s: %s", batchNum, tableName, err.Error())
			return err
		}

		// Get the records for iteration.
		recordsVal := recordsPtr.Elem()
		if recordsVal.Len() == 0 {
			logger.Debug("Keine weiteren Datensätze für %s", tableName)
			break
		}

		// Insert the batch into SQLite.
		// Use Save() instead of Create() to avoid GORM applying default values to zero values (e.g., false for booleans)
		for i := 0; i < recordsVal.Len(); i++ {
			record := recordsVal.Index(i).Addr().Interface()
			if err := targetDB.Save(record).Error; err != nil {
				logger.Error("Fehler beim Speichern von Datensatz in Batch %d für %s: %s", batchNum, tableName, err.Error())
				return err
			}
		}

		logger.Debug("Batch %d/%d für %s erfolgreich kopiert (%d Datensätze)", batchNum, totalBatches, tableName, recordsVal.Len())
	}

	logger.Info("Tabelle %s erfolgreich kopiert (%d Datensätze total)", tableName, count)
	return nil
}
|
|
|
|
|
|
2025-07-29 08:38:29 +02:00
|
|
|
// isTableNotExistError reports whether err indicates that a table is
// missing, covering both the SQLite wording ("no such table: x") and the
// MySQL/MariaDB wording ("Table 'db.x' doesn't exist").
func isTableNotExistError(err error) bool {
	msg := err.Error()
	// The original expression also had a third clause,
	// Contains(msg, "Table") && Contains(msg, "doesn't exist"); since &&
	// binds tighter than ||, that clause was fully subsumed by the plain
	// "doesn't exist" check and has been dropped. Behavior is unchanged.
	return strings.Contains(msg, "no such table") ||
		strings.Contains(msg, "doesn't exist")
}
|
|
|
|
|
|
2025-07-24 07:39:43 +02:00
|
|
|
// LoadPredefinedTestDataFromFile loads predefined test data from a specific file into the provided database
|
|
|
|
|
func LoadPredefinedTestDataFromFile(targetDB *gorm.DB, dataFile string) error {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Lade vordefinierte Testdaten aus Datei: %s", dataFile)
|
|
|
|
|
|
2025-07-24 07:39:43 +02:00
|
|
|
// Check if file exists
|
|
|
|
|
if _, err := os.Stat(dataFile); os.IsNotExist(err) {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Vordefinierte Testdaten-Datei nicht gefunden: %s", dataFile)
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("predefined test data file not found: %s", dataFile)
|
|
|
|
|
}
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Testdaten-Datei existiert: %s", dataFile)
|
2025-07-24 07:39:43 +02:00
|
|
|
|
|
|
|
|
// Migrate structures to target DB
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Migriere Strukturen in Zieldatenbank...")
|
2025-07-24 07:39:43 +02:00
|
|
|
err := migrateAllStructures(targetDB)
|
|
|
|
|
if err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Migrieren der Strukturen: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to migrate structures: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Copy data from file database to target database
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Kopiere Testdaten in Zieldatenbank...")
|
2025-07-24 07:39:43 +02:00
|
|
|
err = copyDataFromFileToMemory(dataFile, targetDB)
|
|
|
|
|
if err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Kopieren der Testdaten: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to copy test data to database: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Vordefinierte Testdaten erfolgreich geladen")
|
2025-07-24 07:39:43 +02:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// LoadPredefinedTestData creates a new in-memory test database and loads predefined test data into it (HTTP handler)
|
|
|
|
|
// Todo I think this don't need to be a handler, but can be called directly
|
|
|
|
|
func LoadPredefinedTestData(c *gin.Context) {
|
|
|
|
|
|
|
|
|
|
testDataFile := filepath.Join(testDataDir, "predefined_test_data.db")
|
|
|
|
|
|
|
|
|
|
// Check if file exists
|
|
|
|
|
if _, err := os.Stat(testDataFile); os.IsNotExist(err) {
|
|
|
|
|
respondWithError(c, http.StatusNotFound, "Predefined test data file not found. Run MakeTestdataFromLive first.")
|
2025-01-18 20:59:54 +01:00
|
|
|
return
|
2025-01-02 21:42:20 +01:00
|
|
|
}
|
2025-07-24 07:39:43 +02:00
|
|
|
|
|
|
|
|
// Create new in-memory test database using SetupTestDB
|
|
|
|
|
database.SetupTestDB(true)
|
|
|
|
|
|
|
|
|
|
// Load test data using the predefined test data file (includes migrations)
|
|
|
|
|
err := LoadPredefinedTestDataFromFile(database.DB, preparedTestDB)
|
2025-01-02 21:42:20 +01:00
|
|
|
if err != nil {
|
2025-07-24 07:39:43 +02:00
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to load test data: "+err.Error())
|
2025-01-18 20:59:54 +01:00
|
|
|
return
|
2025-01-02 21:42:20 +01:00
|
|
|
}
|
2025-07-24 07:39:43 +02:00
|
|
|
|
|
|
|
|
// Get statistics about the loaded data
|
|
|
|
|
stats, err := getTestDataStatistics(database.DB)
|
2025-01-02 21:42:20 +01:00
|
|
|
if err != nil {
|
2025-07-24 07:39:43 +02:00
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to get test data statistics: "+err.Error())
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
c.JSON(http.StatusOK, gin.H{
|
|
|
|
|
"message": "Predefined test data loaded successfully into in-memory database",
|
|
|
|
|
"test_data_file": testDataFile,
|
|
|
|
|
"statistics": stats,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// copyDataFromFileToMemory copies data from a SQLite file to an in-memory database
|
|
|
|
|
func copyDataFromFileToMemory(sourceFile string, targetDB *gorm.DB) error {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Kopiere Daten von SQLite-Datei in Memory-Datenbank: %s", sourceFile)
|
|
|
|
|
|
2025-07-24 07:39:43 +02:00
|
|
|
// Copy all tables using ATTACH and INSERT
|
|
|
|
|
attachSQL := fmt.Sprintf("ATTACH DATABASE '%s' AS source", sourceFile)
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Hänge Quell-Datenbank an: %s", attachSQL)
|
2025-07-24 07:39:43 +02:00
|
|
|
if err := targetDB.Exec(attachSQL).Error; err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Anhängen der Quell-Datenbank: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to attach source database: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Get list of tables from source database
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Ermittle Tabellenliste aus Quell-Datenbank...")
|
2025-07-24 07:39:43 +02:00
|
|
|
var tables []string
|
|
|
|
|
if err := targetDB.Raw("SELECT name FROM source.sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'").Scan(&tables).Error; err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Ermitteln der Tabellenliste: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to get table list: %w", err)
|
|
|
|
|
}
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Gefundene Tabellen zum Kopieren: %d (%v)", len(tables), tables)
|
2025-07-24 07:39:43 +02:00
|
|
|
|
|
|
|
|
// Copy each table
|
2025-08-11 07:46:53 +02:00
|
|
|
for i, table := range tables {
|
|
|
|
|
logger.Debug("Kopiere Tabelle %d/%d: %s", i+1, len(tables), table)
|
2025-07-24 07:39:43 +02:00
|
|
|
copySQL := fmt.Sprintf("INSERT OR REPLACE INTO main.%s SELECT * FROM source.%s", table, table)
|
|
|
|
|
if err := targetDB.Exec(copySQL).Error; err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Kopieren der Tabelle %s: %s", table, err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to copy table %s: %w", table, err)
|
|
|
|
|
}
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Tabelle %s erfolgreich kopiert", table)
|
2025-07-24 07:39:43 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Detach the source database
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Löse Quell-Datenbank-Verbindung...")
|
2025-07-24 07:39:43 +02:00
|
|
|
if err := targetDB.Exec("DETACH DATABASE source").Error; err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler beim Lösen der Quell-Datenbank-Verbindung: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
return fmt.Errorf("failed to detach source database: %w", err)
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Daten erfolgreich von Datei in Memory-Datenbank kopiert")
|
2025-07-24 07:39:43 +02:00
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// getTestDataStatistics returns statistics about the test database
|
|
|
|
|
func getTestDataStatistics(db *gorm.DB) (map[string]int64, error) {
|
|
|
|
|
stats := make(map[string]int64)
|
|
|
|
|
|
|
|
|
|
// Count records in each table
|
|
|
|
|
tables := map[string]interface{}{
|
|
|
|
|
"users": &user.User{},
|
2025-07-28 21:35:29 +02:00
|
|
|
"characters": &models.Char{},
|
2025-07-27 23:13:04 +02:00
|
|
|
"gsmaster_skills": &models.Skill{},
|
2025-07-27 23:50:19 +02:00
|
|
|
"gsmaster_spells": &models.Spell{},
|
2025-07-28 18:56:45 +02:00
|
|
|
"gsmaster_equipment": &models.Equipment{},
|
2025-07-28 22:08:19 +02:00
|
|
|
"skills_fertigkeiten": &models.SkFertigkeit{},
|
|
|
|
|
"skills_waffenfertigkeiten": &models.SkWaffenfertigkeit{},
|
|
|
|
|
"skills_zauber": &models.SkZauber{},
|
|
|
|
|
"equipment_ausruestung": &models.EqAusruestung{},
|
|
|
|
|
"equipment_waffen": &models.EqWaffe{},
|
2025-07-24 07:39:43 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for name, model := range tables {
|
|
|
|
|
var count int64
|
|
|
|
|
if err := db.Model(model).Count(&count).Error; err != nil {
|
|
|
|
|
return stats, fmt.Errorf("failed to count %s: %w", name, err)
|
|
|
|
|
}
|
|
|
|
|
stats[name] = count
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return stats, nil
|
|
|
|
|
}
|
2026-01-25 21:19:53 +01:00
|
|
|
func setupCheck(c *gin.Context, db *gorm.DB) {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Führe Strukturmigration durch...")
|
2025-07-24 07:39:43 +02:00
|
|
|
err := migrateAllStructures(db)
|
2025-01-02 21:42:20 +01:00
|
|
|
if err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler bei der Strukturmigration: %s", err.Error())
|
2025-07-24 07:39:43 +02:00
|
|
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
2025-01-18 20:59:54 +01:00
|
|
|
return
|
2025-01-02 21:42:20 +01:00
|
|
|
}
|
2025-08-07 09:41:51 +02:00
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Debug("Führe Datenmigration durch...")
|
2025-08-07 09:41:51 +02:00
|
|
|
err = migrateDataIfNeeded(db)
|
|
|
|
|
if err != nil {
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Error("Fehler bei der Datenmigration: %s", err.Error())
|
2025-08-07 09:41:51 +02:00
|
|
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to migrate data: " + err.Error()})
|
|
|
|
|
return
|
|
|
|
|
}
|
2025-08-11 07:46:53 +02:00
|
|
|
|
|
|
|
|
logger.Info("Setup-Check erfolgreich abgeschlossen")
|
2025-01-18 20:59:54 +01:00
|
|
|
c.JSON(http.StatusOK, gin.H{"message": "Setup Check OK"})
|
2026-01-25 21:19:53 +01:00
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func SetupCheck(c *gin.Context) {
|
|
|
|
|
logger.Info("Starte Setup-Check...")
|
|
|
|
|
|
|
|
|
|
db := database.ConnectDatabase()
|
|
|
|
|
if db == nil {
|
|
|
|
|
logger.Error("Fehler beim Verbinden mit der Datenbank für Setup-Check")
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to connect to DataBase")
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
logger.Debug("Erfolgreich mit Datenbank für Setup-Check verbunden")
|
|
|
|
|
setupCheck(c, db)
|
2025-01-02 21:42:20 +01:00
|
|
|
}
|
2025-07-29 19:21:58 +02:00
|
|
|
|
2025-12-27 08:33:42 +01:00
|
|
|
func SetupCheckDev(c *gin.Context) {
|
|
|
|
|
logger.Info("Starte Setup-Check... PreparedTestDB")
|
|
|
|
|
|
|
|
|
|
// Use the prepared test database for development setup check
|
|
|
|
|
db, dberr := gorm.Open(sqlite.Open(database.PreparedTestDB), &gorm.Config{})
|
|
|
|
|
if dberr != nil {
|
|
|
|
|
logger.Error("SetupTestDB: Fehler beim Verbinden mit der Test-Datenbank: %s", dberr.Error())
|
|
|
|
|
panic("failed to connect to the test database: " + dberr.Error())
|
|
|
|
|
}
|
|
|
|
|
database.DB = db
|
|
|
|
|
|
|
|
|
|
logger.Debug("Erfolgreich mit Datenbank für Setup-Check verbunden")
|
2026-01-25 21:19:53 +01:00
|
|
|
setupCheck(c, db)
|
2025-12-27 08:33:42 +01:00
|
|
|
}
|
2026-01-12 16:36:35 +01:00
|
|
|
|
2026-01-14 21:58:37 +01:00
|
|
|
/*
|
2026-01-12 16:36:35 +01:00
|
|
|
// PopulateClassLearningPoints populates the class learning points tables from hardcoded data
|
|
|
|
|
func PopulateClassLearningPoints(c *gin.Context) {
|
|
|
|
|
logger.Info("Starte Population der Class Learning Points Daten...")
|
|
|
|
|
|
|
|
|
|
err := models.PopulateClassLearningPointsData()
|
|
|
|
|
if err != nil {
|
|
|
|
|
logger.Error("Fehler beim Populieren der Class Learning Points: %s", err.Error())
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to populate class learning points: "+err.Error())
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
logger.Info("Class Learning Points erfolgreich populiert")
|
|
|
|
|
c.JSON(http.StatusOK, gin.H{"message": "Class learning points data populated successfully"})
|
|
|
|
|
}
|
2026-01-14 21:58:37 +01:00
|
|
|
*/
|
2026-01-12 16:36:35 +01:00
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
func ReconnectDataBase(c *gin.Context) {
|
|
|
|
|
logger.Info("Führe Datenbank-Reconnect durch...")
|
2025-07-29 19:21:58 +02:00
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
db := database.ConnectDatabase()
|
|
|
|
|
if db == nil {
|
|
|
|
|
logger.Error("Fehler beim Reconnect zur Datenbank")
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to reconnect to DataBase")
|
2025-07-29 19:21:58 +02:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
logger.Info("Datenbank-Reconnect erfolgreich")
|
|
|
|
|
c.JSON(http.StatusOK, gin.H{"message": "Database reconnected successfully"})
|
|
|
|
|
}
|
2025-07-29 19:21:58 +02:00
|
|
|
|
2025-08-11 07:46:53 +02:00
|
|
|
func ReloadENV(c *gin.Context) {
|
|
|
|
|
logger.Info("Starte Reload der Umgebungsvariablen...")
|
|
|
|
|
|
|
|
|
|
// Reload the environment variables
|
|
|
|
|
config.LoadConfig()
|
|
|
|
|
c.JSON(http.StatusOK, gin.H{"message": "Environment variables reloaded successfully"})
|
2025-07-29 19:21:58 +02:00
|
|
|
}
|
2025-08-12 22:09:15 +02:00
|
|
|
|
|
|
|
|
// TransferSQLiteToMariaDB transfers data from SQLite test database to MariaDB
|
|
|
|
|
func TransferSQLiteToMariaDB(c *gin.Context) {
|
|
|
|
|
logger.Info("Starte Datenübertragung von SQLite zu MariaDB...")
|
|
|
|
|
|
|
|
|
|
// Path to the SQLite source database
|
|
|
|
|
sourceFile := preparedTestDB
|
|
|
|
|
|
|
|
|
|
// Check if source file exists
|
|
|
|
|
if _, err := os.Stat(sourceFile); os.IsNotExist(err) {
|
|
|
|
|
logger.Error("SQLite-Quelldatei nicht gefunden: %s", sourceFile)
|
|
|
|
|
respondWithError(c, http.StatusNotFound, "SQLite source file not found: "+sourceFile)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
logger.Debug("SQLite-Quelldatei gefunden: %s", sourceFile)
|
|
|
|
|
|
|
|
|
|
// Connect to SQLite source database
|
|
|
|
|
logger.Debug("Verbinde mit SQLite-Quelldatenbank...")
|
|
|
|
|
sourceDB, err := gorm.Open(sqlite.Open(sourceFile), &gorm.Config{})
|
|
|
|
|
if err != nil {
|
|
|
|
|
logger.Error("Fehler beim Verbinden mit SQLite-Datenbank: %s", err.Error())
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to connect to SQLite source: "+err.Error())
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
defer func() {
|
|
|
|
|
if sqlDB, err := sourceDB.DB(); err == nil {
|
|
|
|
|
logger.Debug("Schließe SQLite-Datenbankverbindung")
|
|
|
|
|
sqlDB.Close()
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
logger.Debug("SQLite-Verbindung erfolgreich")
|
|
|
|
|
|
|
|
|
|
// Connect to MariaDB target using the configured connection string
|
|
|
|
|
logger.Debug("Verbinde mit MariaDB-Zieldatenbank...")
|
|
|
|
|
|
|
|
|
|
// Temporarily override config to ensure MariaDB connection
|
|
|
|
|
originalType := config.Cfg.DatabaseType
|
|
|
|
|
originalURL := config.Cfg.DatabaseURL
|
|
|
|
|
originalEnv := config.Cfg.Environment
|
|
|
|
|
|
|
|
|
|
// Force MariaDB connection parameters
|
|
|
|
|
config.Cfg.DatabaseType = "mysql"
|
|
|
|
|
config.Cfg.DatabaseURL = "bamort:bG4)efozrc@tcp(mariadb:3306)/bamort?charset=utf8mb4&parseTime=True&loc=Local"
|
|
|
|
|
config.Cfg.Environment = "production" // Ensure we don't get test DB
|
|
|
|
|
|
|
|
|
|
targetDB := database.ConnectDatabaseOrig() // Use original connection method to avoid test DB
|
|
|
|
|
|
|
|
|
|
// Restore original config
|
|
|
|
|
config.Cfg.DatabaseType = originalType
|
|
|
|
|
config.Cfg.DatabaseURL = originalURL
|
|
|
|
|
config.Cfg.Environment = originalEnv
|
|
|
|
|
|
|
|
|
|
if targetDB == nil {
|
|
|
|
|
logger.Error("Fehler beim Verbinden mit MariaDB-Zieldatenbank")
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to connect to MariaDB target")
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
logger.Debug("MariaDB-Verbindung erfolgreich")
|
|
|
|
|
|
|
|
|
|
// Migrate all structures to MariaDB first
|
|
|
|
|
logger.Debug("Migriere Strukturen in MariaDB-Datenbank...")
|
|
|
|
|
if err := migrateAllStructures(targetDB); err != nil {
|
|
|
|
|
logger.Error("Fehler beim Migrieren der Strukturen in MariaDB: %s", err.Error())
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to migrate structures to MariaDB: "+err.Error())
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
logger.Debug("Strukturen erfolgreich migriert")
|
|
|
|
|
|
|
|
|
|
// Clear existing data in MariaDB (optional - be careful!)
|
|
|
|
|
clearExisting := c.Query("clear")
|
|
|
|
|
if clearExisting == "true" {
|
|
|
|
|
logger.Info("Lösche bestehende Daten in MariaDB...")
|
|
|
|
|
if err := clearMariaDBData(targetDB); err != nil {
|
|
|
|
|
logger.Error("Fehler beim Löschen bestehender Daten: %s", err.Error())
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to clear existing data: "+err.Error())
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
logger.Debug("Bestehende Daten gelöscht")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Copy data from SQLite to MariaDB
|
|
|
|
|
logger.Info("Kopiere Daten von SQLite zu MariaDB...")
|
|
|
|
|
if err := copySQLiteToMariaDB(sourceDB, targetDB); err != nil {
|
|
|
|
|
logger.Error("Fehler beim Kopieren der Daten von SQLite zu MariaDB: %s", err.Error())
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to copy data from SQLite to MariaDB: "+err.Error())
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Get statistics about the transferred data
|
|
|
|
|
stats, err := getTestDataStatistics(targetDB)
|
|
|
|
|
if err != nil {
|
|
|
|
|
logger.Error("Fehler beim Abrufen der Datenstatistiken: %s", err.Error())
|
|
|
|
|
respondWithError(c, http.StatusInternalServerError, "Failed to get data statistics: "+err.Error())
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
logger.Info("Datenübertragung von SQLite zu MariaDB erfolgreich abgeschlossen")
|
|
|
|
|
c.JSON(http.StatusOK, gin.H{
|
|
|
|
|
"message": "Data transfer from SQLite to MariaDB completed successfully",
|
|
|
|
|
"source_file": sourceFile,
|
|
|
|
|
"target": "mariadb:3306/bamort",
|
|
|
|
|
"statistics": stats,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// copySQLiteToMariaDB copies all data from SQLite to MariaDB.
//
// Foreign key checks are disabled on the MariaDB side for the duration of the
// copy and re-enabled via defer. The table list below is processed in
// dependency order (parents before children) — do NOT reorder it casually.
func copySQLiteToMariaDB(sqliteDB, mariaDB *gorm.DB) error {
	logger.Debug("Starte Kopiervorgang aller Daten von SQLite zu MariaDB...")

	// Disable foreign key checks temporarily to avoid constraint issues
	// while rows are inserted out of strict referential order.
	logger.Debug("Deaktiviere Foreign Key Checks...")
	if err := mariaDB.Exec("SET FOREIGN_KEY_CHECKS = 0").Error; err != nil {
		// Non-fatal: the copy may still succeed if insert order satisfies FKs.
		logger.Warn("Warnung: Konnte Foreign Key Checks nicht deaktivieren: %s", err.Error())
	}

	// Re-enable foreign key checks at the end, even on error paths.
	defer func() {
		logger.Debug("Aktiviere Foreign Key Checks wieder...")
		if err := mariaDB.Exec("SET FOREIGN_KEY_CHECKS = 1").Error; err != nil {
			logger.Warn("Warnung: Konnte Foreign Key Checks nicht reaktivieren: %s", err.Error())
		}
	}()

	// Same table order as copyMariaDBToSQLite but in reverse direction.
	tables := []interface{}{
		// Base structures (no dependencies)
		&user.User{},

		// Learning costs system - base tables
		&models.Source{},
		&models.CharacterClass{},
		&models.SkillCategory{},
		&models.SkillDifficulty{},
		&models.SpellSchool{},

		// GSMaster base data (must come before the dependent learning cost tables)
		&models.Skill{},
		&models.WeaponSkill{},
		&models.Spell{},
		&models.Equipment{},
		&models.Weapon{},
		&models.Container{},
		&models.Transportation{},
		&models.Believe{},

		// Learning costs system - dependent tables (after Skills/Spells)
		&models.ClassCategoryEPCost{},
		&models.ClassSpellSchoolEPCost{},
		&models.SpellLevelLECost{},
		&models.SkillCategoryDifficulty{}, // now after Skills
		&models.WeaponSkillCategoryDifficulty{},
		&models.SkillImprovementCost{},
		&models.ClassCategoryLearningPoints{},
		&models.ClassSpellPoints{},
		&models.ClassTypicalSkill{},
		&models.ClassTypicalSpell{},

		// Characters (base)
		&models.Char{},

		// Character attributes (depend on Char)
		&models.Eigenschaft{},
		&models.Lp{},
		&models.Ap{},
		&models.B{},
		&models.Merkmale{},
		&models.Erfahrungsschatz{},
		&models.Bennies{},
		&models.Vermoegen{},

		// Character skills (depend on Char and Skills)
		&models.SkFertigkeit{},
		&models.SkWaffenfertigkeit{},
		&models.SkAngeboreneFertigkeit{},
		&models.SkZauber{},

		// Character equipment (depends on Char and Equipment)
		&models.EqAusruestung{},
		&models.EqWaffe{},
		&models.EqContainer{},

		// Character creation sessions (depend on Char)
		&models.CharacterCreationSession{},

		// Audit logging (depends on Char)
		&models.AuditLogEntry{},
	}

	logger.Info("Kopiere Daten für %d Tabellen von SQLite zu MariaDB...", len(tables))
	for i, model := range tables {
		logger.Debug("Kopiere Tabelle %d/%d: %T", i+1, len(tables), model)
		if err := copyTableDataReverse(sqliteDB, mariaDB, model); err != nil {
			logger.Error("Fehler beim Kopieren der Tabellendaten für %T: %s", model, err.Error())
			return fmt.Errorf("failed to copy table data for %T: %w", model, err)
		}
	}

	logger.Info("Alle Tabellendaten erfolgreich von SQLite zu MariaDB kopiert")
	return nil
}
|
|
|
|
|
|
|
|
|
|
// copyTableDataReverse copies all data from source to target database
|
|
|
|
|
func copyTableDataReverse(sourceDB, targetDB *gorm.DB, model interface{}) error {
|
|
|
|
|
tableName := fmt.Sprintf("%T", model)
|
|
|
|
|
logger.Debug("Starte Kopiervorgang für Tabelle: %s", tableName)
|
|
|
|
|
|
|
|
|
|
// Count records in source
|
|
|
|
|
var count int64
|
|
|
|
|
err := sourceDB.Model(model).Count(&count).Error
|
|
|
|
|
if err != nil {
|
|
|
|
|
if isTableNotExistError(err) {
|
|
|
|
|
logger.Debug("Tabelle %s existiert nicht in der Quelle, überspringe", tableName)
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
logger.Error("Fehler beim Zählen der Datensätze für %s: %s", tableName, err.Error())
|
|
|
|
|
return err
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if count == 0 {
|
|
|
|
|
logger.Debug("Tabelle %s ist leer, keine Daten zu kopieren", tableName)
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
logger.Debug("Kopiere %d Datensätze für Tabelle %s", count, tableName)
|
|
|
|
|
|
|
|
|
|
// Copy data in batches
|
|
|
|
|
batchSize := 100
|
|
|
|
|
totalBatches := (int(count) + batchSize - 1) / batchSize
|
|
|
|
|
|
|
|
|
|
for batch := 0; batch < totalBatches; batch++ {
|
|
|
|
|
offset := batch * batchSize
|
|
|
|
|
logger.Debug("Verarbeite Batch %d/%d für Tabelle %s (Offset: %d)", batch+1, totalBatches, tableName, offset)
|
|
|
|
|
|
|
|
|
|
// Create slice to hold batch data and read from source
|
|
|
|
|
var records interface{}
|
|
|
|
|
|
|
|
|
|
// Read batch from source
|
|
|
|
|
switch model.(type) {
|
|
|
|
|
case *user.User:
|
|
|
|
|
var batch []user.User
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Source:
|
|
|
|
|
var batch []models.Source
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.CharacterClass:
|
|
|
|
|
var batch []models.CharacterClass
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SkillCategory:
|
|
|
|
|
var batch []models.SkillCategory
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SkillDifficulty:
|
|
|
|
|
var batch []models.SkillDifficulty
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SpellSchool:
|
|
|
|
|
var batch []models.SpellSchool
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.ClassCategoryEPCost:
|
|
|
|
|
var batch []models.ClassCategoryEPCost
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.ClassSpellSchoolEPCost:
|
|
|
|
|
var batch []models.ClassSpellSchoolEPCost
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SpellLevelLECost:
|
|
|
|
|
var batch []models.SpellLevelLECost
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SkillCategoryDifficulty:
|
|
|
|
|
var batch []models.SkillCategoryDifficulty
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
2026-01-12 16:36:35 +01:00
|
|
|
case *models.WeaponSkillCategoryDifficulty:
|
|
|
|
|
var batch []models.WeaponSkillCategoryDifficulty
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
2025-08-12 22:09:15 +02:00
|
|
|
case *models.SkillImprovementCost:
|
|
|
|
|
var batch []models.SkillImprovementCost
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
2026-01-12 16:36:35 +01:00
|
|
|
case *models.ClassCategoryLearningPoints:
|
|
|
|
|
var batch []models.ClassCategoryLearningPoints
|
2026-01-12 16:36:35 +01:00
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.ClassSpellPoints:
|
|
|
|
|
var batch []models.ClassSpellPoints
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.ClassTypicalSkill:
|
|
|
|
|
var batch []models.ClassTypicalSkill
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.ClassTypicalSpell:
|
|
|
|
|
var batch []models.ClassTypicalSpell
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
2025-08-12 22:09:15 +02:00
|
|
|
case *models.Skill:
|
|
|
|
|
var batch []models.Skill
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.WeaponSkill:
|
|
|
|
|
var batch []models.WeaponSkill
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Spell:
|
|
|
|
|
var batch []models.Spell
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Equipment:
|
|
|
|
|
var batch []models.Equipment
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Weapon:
|
|
|
|
|
var batch []models.Weapon
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Container:
|
|
|
|
|
var batch []models.Container
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Transportation:
|
|
|
|
|
var batch []models.Transportation
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Believe:
|
|
|
|
|
var batch []models.Believe
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Char:
|
|
|
|
|
var batch []models.Char
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Eigenschaft:
|
|
|
|
|
var batch []models.Eigenschaft
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Lp:
|
|
|
|
|
var batch []models.Lp
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Ap:
|
|
|
|
|
var batch []models.Ap
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.B:
|
|
|
|
|
var batch []models.B
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Merkmale:
|
|
|
|
|
var batch []models.Merkmale
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Erfahrungsschatz:
|
|
|
|
|
var batch []models.Erfahrungsschatz
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Bennies:
|
|
|
|
|
var batch []models.Bennies
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.Vermoegen:
|
|
|
|
|
var batch []models.Vermoegen
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SkFertigkeit:
|
|
|
|
|
var batch []models.SkFertigkeit
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SkWaffenfertigkeit:
|
|
|
|
|
var batch []models.SkWaffenfertigkeit
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SkAngeboreneFertigkeit:
|
|
|
|
|
var batch []models.SkAngeboreneFertigkeit
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.SkZauber:
|
|
|
|
|
var batch []models.SkZauber
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.EqAusruestung:
|
|
|
|
|
var batch []models.EqAusruestung
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.EqWaffe:
|
|
|
|
|
var batch []models.EqWaffe
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.EqContainer:
|
|
|
|
|
var batch []models.EqContainer
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
2026-01-12 16:36:35 +01:00
|
|
|
case *models.CharacterCreationSession:
|
|
|
|
|
var batch []models.CharacterCreationSession
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
|
|
|
|
case *models.AuditLogEntry:
|
|
|
|
|
var batch []models.AuditLogEntry
|
|
|
|
|
if err := sourceDB.Limit(batchSize).Offset(offset).Find(&batch).Error; err != nil {
|
|
|
|
|
return fmt.Errorf("failed to read batch from source: %w", err)
|
|
|
|
|
}
|
|
|
|
|
records = batch
|
2025-08-12 22:09:15 +02:00
|
|
|
default:
|
|
|
|
|
return fmt.Errorf("unsupported model type: %T", model)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Insert batch into target database using CreateInBatches for better performance
|
|
|
|
|
if err := targetDB.Clauses(clause.OnConflict{UpdateAll: true}).CreateInBatches(records, batchSize).Error; err != nil {
|
|
|
|
|
logger.Error("Fehler beim Einfügen des Batches für Tabelle %s: %s", tableName, err.Error())
|
|
|
|
|
return fmt.Errorf("failed to insert batch for table %s: %w", tableName, err)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
logger.Debug("Batch %d/%d für Tabelle %s erfolgreich verarbeitet", batch+1, totalBatches, tableName)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
logger.Debug("Kopiervorgang für Tabelle %s abgeschlossen", tableName)
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// clearMariaDBData clears all data from MariaDB tables (use with caution!).
//
// Deletion proceeds in reverse dependency order (children before parents,
// the exact reverse of the insertion order in copySQLiteToMariaDB) so that
// foreign key constraints are not violated. Individual failures are logged
// as warnings and do not abort the run; the function always returns nil.
func clearMariaDBData(db *gorm.DB) error {
	logger.Debug("Lösche alle Daten aus MariaDB-Tabellen...")

	// Clear tables in reverse order due to foreign key constraints
	// (reverse of the insertion order in copySQLiteToMariaDB).
	tables := []interface{}{
		// Audit logging and character creation sessions (depend on Char) - delete first
		&models.AuditLogEntry{},
		&models.CharacterCreationSession{},

		// Character equipment (depends on Char and Equipment)
		&models.EqContainer{},
		&models.EqWaffe{},
		&models.EqAusruestung{},

		// Character skills (depend on Char and Skills)
		&models.SkZauber{},
		&models.SkAngeboreneFertigkeit{},
		&models.SkWaffenfertigkeit{},
		&models.SkFertigkeit{},

		// Character attributes (depend on Char)
		&models.Vermoegen{},
		&models.Bennies{},
		&models.Erfahrungsschatz{},
		&models.Merkmale{},
		&models.B{},
		&models.Ap{},
		&models.Lp{},
		&models.Eigenschaft{},

		// Characters (base)
		&models.Char{},

		// Learning costs system - dependent tables (delete before Skills/Spells)
		&models.SkillImprovementCost{},
		&models.WeaponSkillCategoryDifficulty{},
		&models.SkillCategoryDifficulty{},
		&models.SpellLevelLECost{},
		&models.ClassSpellSchoolEPCost{},
		&models.ClassCategoryEPCost{},
		&models.ClassTypicalSpell{},
		&models.ClassTypicalSkill{},
		&models.ClassSpellPoints{},
		&models.ClassCategoryLearningPoints{},

		// GSMaster base data
		&models.Believe{},
		&models.Transportation{},
		&models.Container{},
		&models.Weapon{},
		&models.Equipment{},
		&models.Spell{},
		&models.WeaponSkill{},
		&models.Skill{},

		// Learning costs system - base tables
		&models.SpellSchool{},
		&models.SkillDifficulty{},
		&models.SkillCategory{},
		&models.CharacterClass{},
		&models.Source{},

		// Base structures (no dependencies) - delete last
		&user.User{},
	}

	for _, model := range tables {
		tableName := fmt.Sprintf("%T", model)
		logger.Debug("Lösche Daten aus Tabelle: %s", tableName)

		// AllowGlobalUpdate permits an unconditioned DELETE on the whole table.
		if err := db.Session(&gorm.Session{AllowGlobalUpdate: true}).Delete(model).Error; err != nil {
			// Continue with other tables even if one fails
			logger.Warn("Warnung beim Löschen der Tabelle %s: %s", tableName, err.Error())
		}
	}

	logger.Debug("Alle Tabellendaten gelöscht")
	return nil
}
|