+Datenexport

This commit is contained in:
Simon Martens
2026-01-28 17:26:04 +01:00
parent de37145471
commit b0a57884bf
19 changed files with 3729 additions and 1931 deletions

View File

@@ -0,0 +1,67 @@
package exports
import (
"os"
"path/filepath"
"sync"
"time"
"github.com/Theodor-Springmann-Stiftung/musenalm/dbmodels"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/tools/types"
)
// cleanupOnce guards the background cleanup goroutine so it is started at
// most once per process, regardless of how often StartCleanup is called.
var cleanupOnce sync.Once

// StartCleanup launches a background goroutine that removes expired exports
// immediately and then once per interval. A non-positive interval falls back
// to 12 hours. Subsequent calls are no-ops.
func StartCleanup(app core.App, interval time.Duration) {
	if interval <= 0 {
		interval = 12 * time.Hour
	}
	cleanupOnce.Do(func() {
		go func() {
			ticker := time.NewTicker(interval)
			defer ticker.Stop()
			// Sweep first, then wait for the next tick — identical call
			// sequence to "run once, then run on every tick".
			for {
				CleanupExpired(app)
				<-ticker.C
			}
		}()
	})
}
// CleanupExpired deletes every export record whose expiry timestamp has
// passed, removing the on-disk artifact (and any leftover ".tmp" file)
// before deleting the record itself. Errors are logged, never returned,
// since this runs as a best-effort background sweep.
func CleanupExpired(app core.App) {
	now := types.NowDateTime()
	records := []*core.Record{}
	err := app.RecordQuery(dbmodels.EXPORTS_TABLE).
		Where(dbx.NewExp(dbmodels.EXPORT_EXPIRES_FIELD+" <= {:now}", dbx.Params{"now": now})).
		All(&records)
	if err != nil {
		app.Logger().Error("Export cleanup query failed", "error", err)
		return
	}
	if len(records) == 0 {
		return
	}
	exportDir, err := ExportDir(app)
	if err != nil {
		app.Logger().Error("Export cleanup dir failed", "error", err)
		return
	}
	for _, record := range records {
		// Prefer the stored filename. When it is empty, try every pattern
		// the exporters use: "<id>.zip" (table export), "<id>-files.zip"
		// (file export) and "<id>.xml" (legacy single-file export). The
		// previous fallback only covered ".xml" and left zip artifacts
		// behind for records that never got a filename persisted.
		candidates := []string{}
		if filename := record.GetString(dbmodels.EXPORT_FILENAME_FIELD); filename != "" {
			candidates = append(candidates, filepath.Base(filename))
		} else {
			candidates = append(candidates,
				record.Id+".zip",
				record.Id+"-files.zip",
				record.Id+".xml",
			)
		}
		for _, name := range candidates {
			// Best-effort removal; a missing file is not an error here.
			_ = os.Remove(filepath.Join(exportDir, name))
			_ = os.Remove(filepath.Join(exportDir, name+".tmp"))
		}
		if err := app.Delete(record); err != nil {
			app.Logger().Error("Export cleanup delete failed", "error", err, "export_id", record.Id)
		}
	}
}

302
helpers/exports/exporter.go Normal file
View File

@@ -0,0 +1,302 @@
package exports
import (
"archive/zip"
"encoding/xml"
"os"
"path/filepath"
"strings"
"github.com/Theodor-Springmann-Stiftung/musenalm/dbmodels"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/core"
)
// exportDirName is the subdirectory of the app data dir that holds export
// artifacts.
const exportDirName = "exports"

// ExportDir returns the absolute path of the export directory, creating it
// (including missing parents) with 0755 permissions if necessary.
func ExportDir(app core.App) (string, error) {
	dir := filepath.Join(app.DataDir(), exportDirName)
	if mkErr := os.MkdirAll(dir, 0o755); mkErr != nil {
		return "", mkErr
	}
	return dir, nil
}
// ListTables returns the sorted names of all exportable SQLite tables. It
// skips SQLite-internal tables (via the query), FTS5 shadow tables, and a
// fixed set of auth/system tables that must never appear in an export.
func ListTables(app core.App) ([]string, error) {
	tables := []string{}
	query := "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name"
	if err := app.DB().NewQuery(query).Column(&tables); err != nil {
		return nil, err
	}
	// Tables excluded from every export (both historic spellings of the
	// auth-origins table are listed on purpose).
	skip := map[string]struct{}{
		"_superusers":    {},
		"_mfas":          {},
		"_otps":          {},
		"_externalAuths": {},
		"_authorigins":   {},
		"_authOrigins":   {},
		"access_tokens":  {},
	}
	kept := make([]string, 0, len(tables))
	for _, name := range tables {
		if strings.HasPrefix(name, dbmodels.FTS5_PREFIX) {
			continue
		}
		if _, excluded := skip[name]; excluded {
			continue
		}
		kept = append(kept, name)
	}
	return kept, nil
}
// Run executes a complete database export for the export record identified
// by exportID: every table returned by ListTables is serialized to XML and
// written as one entry of a single zip archive in the export directory.
//
// The record doubles as the job's state machine — status, progress, table
// counters and the current table name are persisted as the export advances
// so callers can poll it. On failure markFailed stamps the record and
// removes partial output; on success the final filename and size are stored.
func Run(app core.App, exportID string) error {
	record, err := app.FindRecordById(dbmodels.EXPORTS_TABLE, exportID)
	if err != nil {
		return err
	}
	tables, err := ListTables(app)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Reset all progress fields and mark the job as running.
	record.Set(dbmodels.EXPORT_STATUS_FIELD, dbmodels.EXPORT_STATUS_RUNNING)
	record.Set(dbmodels.EXPORT_TABLES_TOTAL_FIELD, len(tables))
	record.Set(dbmodels.EXPORT_TABLES_DONE_FIELD, 0)
	record.Set(dbmodels.EXPORT_PROGRESS_FIELD, 0)
	record.Set(dbmodels.EXPORT_CURRENT_TABLE_FIELD, "")
	record.Set(dbmodels.EXPORT_ERROR_FIELD, "")
	if err := app.Save(record); err != nil {
		return err
	}
	exportDir, err := ExportDir(app)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Write to a ".tmp" path and rename at the end so readers never see a
	// half-written archive under the final name.
	filename := exportID + ".zip"
	tempPath := filepath.Join(exportDir, filename+".tmp")
	finalPath := filepath.Join(exportDir, filename)
	file, err := os.Create(tempPath)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Safety net for early returns; `file` is set to nil after the explicit
	// Close below so the deferred close never double-closes.
	defer func() {
		if file != nil {
			_ = file.Close()
		}
	}()
	zipWriter := zip.NewWriter(file)
	for idx, table := range tables {
		// Progress is updated before and after each table so the current
		// table name is visible while it is being exported.
		updateProgress(app, record, table, idx, len(tables))
		if err := exportTableZipEntry(app, zipWriter, table); err != nil {
			return markFailed(app, record, err)
		}
		updateProgress(app, record, table, idx+1, len(tables))
	}
	// Close the zip writer (flushes the central directory) and then the
	// file itself before renaming.
	if err := zipWriter.Close(); err != nil {
		return markFailed(app, record, err)
	}
	if err := file.Close(); err != nil {
		return markFailed(app, record, err)
	}
	file = nil
	if err := os.Rename(tempPath, finalPath); err != nil {
		return markFailed(app, record, err)
	}
	stat, err := os.Stat(finalPath)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Finalize the record with the artifact's name and size.
	record.Set(dbmodels.EXPORT_STATUS_FIELD, dbmodels.EXPORT_STATUS_COMPLETE)
	record.Set(dbmodels.EXPORT_PROGRESS_FIELD, 100)
	record.Set(dbmodels.EXPORT_TABLES_DONE_FIELD, len(tables))
	record.Set(dbmodels.EXPORT_FILENAME_FIELD, filename)
	record.Set(dbmodels.EXPORT_SIZE_FIELD, stat.Size())
	record.Set(dbmodels.EXPORT_CURRENT_TABLE_FIELD, "")
	record.Set(dbmodels.EXPORT_ERROR_FIELD, "")
	if err := app.Save(record); err != nil {
		return err
	}
	return nil
}
// exportTableZipEntry streams the full contents of one table into the zip
// archive as a single XML file named after the (sanitized) table name.
// Rows are written as <row><col>…</col></row> elements; NULL columns carry
// a null="true" attribute, and credential columns of the "users" table are
// redacted (emitted as empty null elements).
func exportTableZipEntry(app core.App, zipWriter *zip.Writer, table string) error {
	entryName := safeFilename(table)
	if entryName == "" {
		entryName = "table"
	}
	entryName += ".xml"
	entry, err := zipWriter.Create(entryName)
	if err != nil {
		return err
	}
	// xml.Encoder does not emit the <?xml?> declaration itself.
	if _, err := entry.Write([]byte(xml.Header)); err != nil {
		return err
	}
	encoder := xml.NewEncoder(entry)
	start := xml.StartElement{
		Name: xml.Name{Local: "table"},
		Attr: []xml.Attr{{Name: xml.Name{Local: "name"}, Value: table}},
	}
	if err := encoder.EncodeToken(start); err != nil {
		return err
	}
	// Table name is backtick-quoted (not parameterizable as an identifier).
	query := "SELECT * FROM " + quoteTableName(table)
	rows, err := app.DB().NewQuery(query).Rows()
	if err != nil {
		return err
	}
	defer rows.Close()
	columns, err := rows.Columns()
	if err != nil {
		return err
	}
	// Loop-invariant redaction set — previously rebuilt for every row.
	// Credential columns of the users table must never leave the database.
	sensitiveFields := map[string]struct{}{}
	if table == "users" {
		sensitiveFields = map[string]struct{}{
			"password":      {},
			"password_hash": {},
			"passwordhash":  {},
			"tokenkey":      {},
			"token_key":     {},
		}
	}
	for rows.Next() {
		rowData := dbx.NullStringMap{}
		if err := rows.ScanMap(rowData); err != nil {
			return err
		}
		if err := encoder.EncodeToken(xml.StartElement{Name: xml.Name{Local: "row"}}); err != nil {
			return err
		}
		for _, col := range columns {
			lowerCol := strings.ToLower(col)
			if _, ok := sensitiveFields[lowerCol]; ok {
				// Redacted: empty element flagged as null, no value emitted.
				if err := encoder.EncodeToken(xml.StartElement{Name: xml.Name{Local: col}, Attr: []xml.Attr{{Name: xml.Name{Local: "null"}, Value: "true"}}}); err != nil {
					return err
				}
				if err := encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: col}}); err != nil {
					return err
				}
				continue
			}
			value := rowData[col]
			attrs := []xml.Attr{}
			if !value.Valid {
				attrs = append(attrs, xml.Attr{Name: xml.Name{Local: "null"}, Value: "true"})
			}
			if err := encoder.EncodeToken(xml.StartElement{Name: xml.Name{Local: col}, Attr: attrs}); err != nil {
				return err
			}
			if value.Valid {
				// CharData is XML-escaped by the encoder.
				if err := encoder.EncodeToken(xml.CharData([]byte(value.String))); err != nil {
					return err
				}
			}
			if err := encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: col}}); err != nil {
				return err
			}
		}
		if err := encoder.EncodeToken(xml.EndElement{Name: xml.Name{Local: "row"}}); err != nil {
			return err
		}
	}
	if err := rows.Err(); err != nil {
		return err
	}
	if err := encoder.EncodeToken(start.End()); err != nil {
		return err
	}
	// Flush buffered tokens into the zip entry before it is finalized.
	return encoder.Flush()
}
// updateProgress persists the current table name, the done count and an
// integer percentage onto the export record. Save failures are only logged
// so that a transient DB hiccup does not abort the export itself.
func updateProgress(app core.App, record *core.Record, table string, done, total int) {
	progress := 0
	if total > 0 {
		// Exact integer arithmetic: the previous float expression
		// int(float64(done)/float64(total)*100) truncated one too low for
		// some inputs (e.g. done=29, total=100 yielded 28).
		progress = done * 100 / total
	}
	record.Set(dbmodels.EXPORT_CURRENT_TABLE_FIELD, table)
	record.Set(dbmodels.EXPORT_TABLES_DONE_FIELD, done)
	record.Set(dbmodels.EXPORT_PROGRESS_FIELD, progress)
	if err := app.Save(record); err != nil {
		app.Logger().Error("Export progress update failed", "error", err, "export_id", record.Id)
	}
}
// markFailed stamps the export record as failed (status, error message,
// cleared progress), removes any partial artifact from disk, and returns
// the original error so callers can simply `return markFailed(...)`.
func markFailed(app core.App, record *core.Record, err error) error {
	record.Set(dbmodels.EXPORT_STATUS_FIELD, dbmodels.EXPORT_STATUS_FAILED)
	record.Set(dbmodels.EXPORT_ERROR_FIELD, err.Error())
	record.Set(dbmodels.EXPORT_CURRENT_TABLE_FIELD, "")
	record.Set(dbmodels.EXPORT_PROGRESS_FIELD, 0)
	if saveErr := app.Save(record); saveErr != nil {
		// If even the failure state cannot be saved, surface that instead.
		return saveErr
	}
	if exportDir, dirErr := ExportDir(app); dirErr == nil {
		name := record.GetString(dbmodels.EXPORT_FILENAME_FIELD)
		if name == "" {
			name = record.Id + ".zip"
		}
		// Base() guards against path separators sneaking into the field.
		name = filepath.Base(name)
		for _, target := range []string{
			filepath.Join(exportDir, name),
			filepath.Join(exportDir, name+".tmp"),
		} {
			_ = os.Remove(target) // best effort; file may not exist
		}
	}
	return err
}
// quoteTableName wraps name in backticks for use as a SQLite identifier,
// doubling any embedded backticks so the name cannot escape the quoting.
func quoteTableName(name string) string {
	var b strings.Builder
	b.Grow(len(name) + 2)
	b.WriteByte('`')
	for i := 0; i < len(name); i++ {
		if name[i] == '`' {
			b.WriteString("``")
		} else {
			b.WriteByte(name[i])
		}
	}
	b.WriteByte('`')
	return b.String()
}
// safeFilename reduces name to a conservative filename alphabet: ASCII
// letters, digits, '_', '-' and '.'. Every other rune is replaced by a
// single '_'. Surrounding whitespace is stripped first; a blank input
// yields the empty string.
func safeFilename(name string) string {
	trimmed := strings.TrimSpace(name)
	if trimmed == "" {
		return ""
	}
	out := make([]byte, 0, len(trimmed))
	for _, r := range trimmed {
		switch {
		case r >= 'a' && r <= 'z',
			r >= 'A' && r <= 'Z',
			r >= '0' && r <= '9',
			r == '_', r == '-', r == '.':
			out = append(out, byte(r))
		default:
			out = append(out, '_')
		}
	}
	return string(out)
}

View File

@@ -0,0 +1,254 @@
package exports
import (
"archive/zip"
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/Theodor-Springmann-Stiftung/musenalm/dbmodels"
"github.com/pocketbase/pocketbase/core"
)
// fileEntry identifies one stored file for the file export: the collection
// it belongs to (by name and id), the owning record, the file field it came
// from, and the filename as stored on disk.
type fileEntry struct {
	CollectionName string
	CollectionId   string
	RecordId       string
	FieldName      string
	Filename       string
}
// RunFiles exports every stored upload of every exportable collection into
// a single zip archive ("<exportID>-files.zip") in the export directory.
//
// Like Run, the export record serves as the job's state machine; here the
// tables_total/tables_done fields count files rather than tables. Missing
// files on disk are skipped and only counted, so a partially-missing
// storage directory still produces a usable archive; the count is surfaced
// via the record's error field on completion.
func RunFiles(app core.App, exportID string) error {
	record, err := app.FindRecordById(dbmodels.EXPORTS_TABLE, exportID)
	if err != nil {
		return err
	}
	files, err := collectFileEntries(app)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Reset progress state and pre-store the final filename so markFailed
	// can locate and remove partial output.
	record.Set(dbmodels.EXPORT_STATUS_FIELD, dbmodels.EXPORT_STATUS_RUNNING)
	record.Set(dbmodels.EXPORT_TABLES_TOTAL_FIELD, len(files))
	record.Set(dbmodels.EXPORT_TABLES_DONE_FIELD, 0)
	record.Set(dbmodels.EXPORT_PROGRESS_FIELD, 0)
	record.Set(dbmodels.EXPORT_CURRENT_TABLE_FIELD, "")
	record.Set(dbmodels.EXPORT_ERROR_FIELD, "")
	record.Set(dbmodels.EXPORT_FILENAME_FIELD, exportID+"-files.zip")
	if err := app.Save(record); err != nil {
		return err
	}
	exportDir, err := ExportDir(app)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Write to a ".tmp" path and rename at the end so readers never see a
	// half-written archive under the final name.
	filename := exportID + "-files.zip"
	tempPath := filepath.Join(exportDir, filename+".tmp")
	finalPath := filepath.Join(exportDir, filename)
	file, err := os.Create(tempPath)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Safety net for early returns; `file` is set to nil after the explicit
	// Close below so the deferred close never double-closes.
	defer func() {
		if file != nil {
			_ = file.Close()
		}
	}()
	zipWriter := zip.NewWriter(file)
	missing := 0
	lastProgressSave := time.Now()
	for idx, entry := range files {
		label := entry.CollectionName + "/" + entry.RecordId + "/" + entry.Filename
		// Progress writes are throttled (see shouldUpdateProgress) because
		// there may be thousands of files; saving per file would dominate.
		if shouldUpdateProgress(idx, len(files), lastProgressSave) {
			updateProgress(app, record, label, idx, len(files))
			lastProgressSave = time.Now()
		}
		if err := addFileToZip(app, zipWriter, entry); err != nil {
			if os.IsNotExist(err) {
				// File referenced in the DB but absent on disk: skip, count.
				missing++
				continue
			}
			return markFailed(app, record, err)
		}
		if shouldUpdateProgress(idx+1, len(files), lastProgressSave) {
			updateProgress(app, record, label, idx+1, len(files))
			lastProgressSave = time.Now()
		}
	}
	if err := zipWriter.Close(); err != nil {
		return markFailed(app, record, err)
	}
	if err := file.Close(); err != nil {
		return markFailed(app, record, err)
	}
	file = nil
	if err := os.Rename(tempPath, finalPath); err != nil {
		return markFailed(app, record, err)
	}
	stat, err := os.Stat(finalPath)
	if err != nil {
		return markFailed(app, record, err)
	}
	// Finalize the record; a non-zero missing count is reported via the
	// error field even though the export completed.
	record.Set(dbmodels.EXPORT_STATUS_FIELD, dbmodels.EXPORT_STATUS_COMPLETE)
	record.Set(dbmodels.EXPORT_PROGRESS_FIELD, 100)
	record.Set(dbmodels.EXPORT_TABLES_DONE_FIELD, len(files))
	record.Set(dbmodels.EXPORT_FILENAME_FIELD, filename)
	record.Set(dbmodels.EXPORT_SIZE_FIELD, stat.Size())
	record.Set(dbmodels.EXPORT_CURRENT_TABLE_FIELD, "")
	if missing > 0 {
		record.Set(dbmodels.EXPORT_ERROR_FIELD, fmt.Sprintf("%d Datei(en) fehlen im Speicher.", missing))
	} else {
		record.Set(dbmodels.EXPORT_ERROR_FIELD, "")
	}
	if err := app.Save(record); err != nil {
		return err
	}
	return nil
}
// collectFileEntries walks every non-view, non-system collection that has at
// least one file field and gathers one entry per unique
// (collection, record, filename) triple.
func collectFileEntries(app core.App) ([]fileEntry, error) {
	collections, err := app.FindAllCollections()
	if err != nil {
		return nil, err
	}
	result := make([]fileEntry, 0)
	seen := map[string]struct{}{}
	for _, coll := range collections {
		if coll == nil || coll.IsView() {
			continue
		}
		// Collections prefixed with "_" are PocketBase system collections.
		if strings.HasPrefix(coll.Name, "_") {
			continue
		}
		fileFields := make([]string, 0)
		for _, f := range coll.Fields {
			if f.Type() == core.FieldTypeFile {
				fileFields = append(fileFields, f.GetName())
			}
		}
		if len(fileFields) == 0 {
			continue
		}
		records := []*core.Record{}
		if err := app.RecordQuery(coll.Name).All(&records); err != nil {
			return nil, err
		}
		for _, rec := range records {
			if rec == nil {
				continue
			}
			for _, field := range fileFields {
				for _, name := range extractFileNames(rec.GetRaw(field)) {
					if name == "" {
						continue
					}
					// Deduplicate on the full triple; the same filename may
					// legitimately appear under different records.
					key := coll.Id + "|" + rec.Id + "|" + name
					if _, dup := seen[key]; dup {
						continue
					}
					seen[key] = struct{}{}
					result = append(result, fileEntry{
						CollectionName: coll.Name,
						CollectionId:   coll.Id,
						RecordId:       rec.Id,
						FieldName:      field,
						Filename:       name,
					})
				}
			}
		}
	}
	return result, nil
}
// extractFileNames normalizes a raw record field value into a list of
// filenames. Accepted shapes: a plain string, a JSON-encoded string array
// (as string or []byte), []string, or []any containing strings. Any other
// value yields nil.
func extractFileNames(raw any) []string {
	if raw == nil {
		return nil
	}
	switch v := raw.(type) {
	case string:
		trimmed := strings.TrimSpace(v)
		if trimmed == "" {
			return nil
		}
		// A leading '[' suggests a JSON array; fall back to treating the
		// whole string as one filename when it does not parse.
		if strings.HasPrefix(trimmed, "[") {
			var names []string
			if json.Unmarshal([]byte(trimmed), &names) == nil {
				return names
			}
		}
		return []string{trimmed}
	case []string:
		return v
	case []any:
		names := make([]string, 0, len(v))
		for _, item := range v {
			if s, ok := item.(string); ok {
				names = append(names, s)
			}
		}
		return names
	case []byte:
		var names []string
		if json.Unmarshal(v, &names) == nil {
			return names
		}
	}
	return nil
}
// addFileToZip copies one stored file from the local storage layout
// (dataDir/storage/<collectionId>/<recordId>/<filename>) into the archive
// under a human-readable collectionName/recordId/filename path.
// A not-found error is returned as-is so the caller can detect it with
// os.IsNotExist and skip the entry.
func addFileToZip(app core.App, zipWriter *zip.Writer, entry fileEntry) error {
	source := filepath.Join(
		app.DataDir(), "storage",
		entry.CollectionId, entry.RecordId, entry.Filename,
	)
	src, err := os.Open(source)
	if err != nil {
		return err
	}
	defer src.Close()
	// Zip paths always use forward slashes, independent of the OS.
	dst, err := zipWriter.Create(entry.CollectionName + "/" + entry.RecordId + "/" + entry.Filename)
	if err != nil {
		return err
	}
	_, err = io.Copy(dst, src)
	return err
}
func shouldUpdateProgress(done, total int, lastSave time.Time) bool {
if total == 0 {
return true
}
if done == 0 || done >= total {
return true
}
if done%200 == 0 {
return true
}
return time.Since(lastSave) > 2*time.Second
}