Setup Git Repository Parsing

This commit is contained in:
Simon Martens
2023-09-10 01:09:20 +02:00
parent 4e3c65dc6f
commit 8fd0050cf3
69 changed files with 1228 additions and 1461 deletions

2
.editorconfig Normal file
View File

@@ -0,0 +1,2 @@
[*.cs]
csharp_new_line_before_open_brace = none

View File

@@ -4,6 +4,7 @@ using HaDocument.Logic;
using HaDocument.Reactors;
using HaXMLReader.Interfaces;
using HaXMLReader;
using System.Xml.Linq;
namespace HaDocument
{
@@ -32,6 +33,18 @@ namespace HaDocument
return GetLibrary();
}
public static ILibrary Create(IHaDocumentOptions Settings, XElement root) {
_lib = new IntermediateLibrary();
SettingsValidator.Validate(Settings);
_settings = Settings;
_createReader(root);
_createReactors();
_reader.Read();
_library = _createLibrary();
_reader.Dispose();
return GetLibrary();
}
private static void _createReactors() {
new EditreasonReactor(_reader, _lib, _settings.NormalizeWhitespace);
new HandDefsReactor(_reader, _lib);
@@ -49,6 +62,10 @@ namespace HaDocument
_reader = new FileReader(_settings.HamannXMLFilePath);
}
private static void _createReader(XElement root) {
_reader = new XElementReader(root);
}
private static ILibrary _createLibrary()
=> _lib.GetLibrary(_settings);

2
HaWeb/.editorconfig Normal file
View File

@@ -0,0 +1,2 @@
[*.cs]
csharp_new_line_before_open_brace = none

View File

@@ -20,265 +20,27 @@ using Microsoft.AspNetCore.Http.Features;
using System.Text;
// Controlling all the API-Endpoints
[FeatureGate(Features.AdminService)]
public class APIController : Controller {
// DI
private IHaDocumentWrappper _lib;
private IReaderService _readerService;
private readonly long _fileSizeLimit;
private readonly string _targetFilePath;
private readonly IXMLService _xmlService;
private readonly IXMLProvider _xmlProvider;
private readonly IXMLTestService _testService;
private readonly IXMLFileProvider _xmlProvider;
// Options
private static readonly string[] _permittedExtensions = { ".xml" };
private static readonly FormOptions _defaultFormOptions = new FormOptions();
public APIController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService xmlService, IXMLProvider xmlProvider, IXMLTestService testService, IConfiguration config) {
public APIController(IHaDocumentWrappper lib, IXMLFileProvider xmlProvider) {
_lib = lib;
_xmlProvider = xmlProvider;
_readerService = readerService;
_xmlService = xmlService;
_testService = testService;
_fileSizeLimit = config.GetValue<long>("FileSizeLimit");
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) {
_targetFilePath = config.GetValue<string>("StoredFilePathWindows");
} else {
_targetFilePath = config.GetValue<string>("StoredFilePathLinux");
}
}
[HttpGet]
[Route("API/Syntaxcheck/{id}")]
[DisableFormValueModelBinding]
[ValidateAntiForgeryToken]
[FeatureGate(Features.UploadService, Features.AdminService)]
public IActionResult SyntaxCheck(string id) {
return Ok();
}
//// UPLOAD ////
[HttpPost]
[Route("API/Upload")]
[DisableFormValueModelBinding]
[ValidateAntiForgeryToken]
[FeatureGate(Features.UploadService, Features.AdminService)]
public async Task<IActionResult> Upload() {
List<XMLRootDocument>? docs = null;
//// 1. Stage: Check Request format and request spec
// Checks the Content-Type Field (must be multipart + Boundary)
if (!MultipartRequestHelper.IsMultipartContentType(Request.ContentType)) {
ModelState.AddModelError("Error", $"Wrong / No Content Type on the Request");
return BadRequest(ModelState);
}
// Divides the multipart document into its sections and sets up a reader
var boundary = MultipartRequestHelper.GetBoundary(MediaTypeHeaderValue.Parse(Request.ContentType), _defaultFormOptions.MultipartBoundaryLengthLimit);
var reader = new MultipartReader(boundary, HttpContext.Request.Body);
MultipartSection? section = null;
try {
section = await reader.ReadNextSectionAsync();
} catch (Exception ex) {
ModelState.AddModelError("Error", "The Request is bad: " + ex.Message);
return BadRequest(ModelState);
}
while (section != null) {
// Multipart document content disposition header read for a section:
// Starts with boundary, contains field name, content-dispo, filename, content-type
var hasContentDispositionHeader = ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out var contentDisposition);
if (contentDisposition != null && contentDisposition.Name == "__RequestVerificationToken") {
try {
section = await reader.ReadNextSectionAsync();
} catch (Exception ex) {
ModelState.AddModelError("Error", "The Request is bad: " + ex.Message);
}
continue;
}
if (hasContentDispositionHeader && contentDisposition != null) {
// Checks if it is a section with content-disposition, name, filename
if (!MultipartRequestHelper.HasFileContentDisposition(contentDisposition)) {
ModelState.AddModelError("Error", $"Wrong Content-Dispostion Headers in Multipart Document");
return BadRequest(ModelState);
}
//// 2. Stage: Check File. Sanity checks on the file on a byte level, extension checking, is it empty etc.
var streamedFileContent = await XMLFileHelpers.ProcessStreamedFile(
section, contentDisposition, ModelState,
_permittedExtensions, _fileSizeLimit);
if (!ModelState.IsValid || streamedFileContent == null)
return BadRequest(ModelState);
//// 3. Stage: Valid XML checking using a simple XDocument.Load()
var xdocument = await XDocumentFileHelper.ProcessStreamedFile(streamedFileContent, ModelState);
if (!ModelState.IsValid || xdocument == null)
return UnprocessableEntity(ModelState);
//// 4. Stage: Is it a Hamann-Document? What kind?
var retdocs = _xmlService.ProbeFile(xdocument, ModelState);
if (!ModelState.IsValid || retdocs == null || !retdocs.Any())
return UnprocessableEntity(ModelState);
//// 5. Stage: Saving the File(s)
foreach (var doc in retdocs) {
// Physical saving
await _xmlProvider.Save(doc, _targetFilePath, ModelState);
// Setting the new document as used
_xmlService.Use(doc);
// Unsetting all old documents as unused
_xmlService.AutoUse(doc.Prefix);
if (!ModelState.IsValid) return StatusCode(500, ModelState);
if (docs == null) docs = new List<XMLRootDocument>();
docs.Add(doc);
}
xdocument = null;
retdocs = null;
streamedFileContent = null;
}
try {
section = await reader.ReadNextSectionAsync();
} catch (Exception ex) {
ModelState.AddModelError("Error", "The Request is bad: " + ex.Message);
return BadRequest(ModelState);
}
}
// 6. Stage: Success! Returning Ok, and redirecting
JsonSerializerOptions options = new() {
ReferenceHandler = ReferenceHandler.Preserve,
Converters = {
new IdentificationStringJSONConverter()
}
};
string json = JsonSerializer.Serialize(docs);
_testService.Test();
return Created(nameof(UploadController), json);
}
//// PUBLISH ////
[HttpPost]
[Route("API/LocalPublish")]
[DisableFormValueModelBinding]
[ValidateAntiForgeryToken]
[FeatureGate(Features.LocalPublishService, Features.AdminService, Features.UploadService)]
public async Task<IActionResult> LocalPublish() {
var element = _xmlService.MergeUsedDocuments(ModelState);
if (!ModelState.IsValid || element == null)
return BadRequest(ModelState);
var savedfile = await _xmlProvider.SaveHamannFile(element, _targetFilePath, ModelState);
if (!ModelState.IsValid || savedfile == null) {
if (savedfile != null)
_xmlProvider.DeleteHamannFile(savedfile.Name);
return BadRequest(ModelState);
}
_ = _lib.SetLibrary(savedfile.PhysicalPath, ModelState);
if (!ModelState.IsValid) {
_xmlProvider.DeleteHamannFile(savedfile.Name);
return BadRequest(ModelState);
}
_xmlProvider.SetInProduction(savedfile);
_xmlService.SetInProduction();
return Created("/", _xmlProvider.GetHamannFiles());
}
[HttpPost]
[Route("API/SetUsed/{id}")]
[DisableFormValueModelBinding]
[ValidateAntiForgeryToken]
[FeatureGate(Features.UploadService, Features.AdminService)]
public async Task<IActionResult> SetUsed(string id) {
var f = _xmlProvider.GetFiles(id);
if (f == null) {
ModelState.AddModelError("Error", "Wrong Endpoint");
return BadRequest(ModelState);
}
var files = f.GetFileList();
if (files == null) {
ModelState.AddModelError("Error", "Wrong Endpoint");
return BadRequest(ModelState);
}
List<XMLRootDocument>? newUsed = null;
if (!MultipartRequestHelper.IsMultipartContentType(Request.ContentType)) {
ModelState.AddModelError("Error", $"Wrong / No Content Type on the Request");
return BadRequest(ModelState);
}
// Same as above, check Upload()
var boundary = MultipartRequestHelper.GetBoundary(MediaTypeHeaderValue.Parse(Request.ContentType), _defaultFormOptions.MultipartBoundaryLengthLimit);
var reader = new MultipartReader(boundary, HttpContext.Request.Body);
MultipartSection? section = null;
try {
section = await reader.ReadNextSectionAsync();
} catch (Exception ex) {
ModelState.AddModelError("Error", "The Request is bad: " + ex.Message);
return BadRequest(ModelState);
}
while (section != null) {
var hasContentDispositionHeader = ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out var contentDisposition);
if (contentDisposition != null && contentDisposition.Name == "__RequestVerificationToken") {
try {
section = await reader.ReadNextSectionAsync();
} catch (Exception ex) {
ModelState.AddModelError("Error", "The Request is bad: " + ex.Message);
}
continue;
}
var filename = string.Empty;
if (hasContentDispositionHeader && contentDisposition != null) {
if (!MultipartRequestHelper.HasFormDataContentDisposition(contentDisposition)) {
ModelState.AddModelError("Error", $"Wrong Content-Dispostion Headers in Multipart Document");
return BadRequest(ModelState);
}
filename = XMLFileHelpers.StreamToString(section.Body, ModelState);
if (!ModelState.IsValid) return BadRequest(ModelState);
var isFile = files.Where(x => x.FileName == filename);
if (isFile == null || !isFile.Any()) {
ModelState.AddModelError("Error", "Tried to add a file that does not exist.");
return BadRequest(ModelState);
}
if (newUsed == null) newUsed = new List<XMLRootDocument>();
newUsed.Add(isFile.First());
}
try {
section = await reader.ReadNextSectionAsync();
} catch (Exception ex) {
ModelState.AddModelError("Error", "The Request is bad: " + ex.Message);
return BadRequest(ModelState);
}
}
_xmlService.UnUse(id);
if (newUsed != null && newUsed.Any()) {
newUsed.ForEach(x => _xmlService.Use(x));
}
_testService.Test();
return Created("/", newUsed);
}
[HttpPost]
[Route("API/SetInProduction")]
[DisableFormValueModelBinding]
[ValidateAntiForgeryToken]
[FeatureGate(Features.UploadService, Features.AdminService)]
[FeatureGate(Features.LocalPublishService, Features.AdminService)]
public async Task<IActionResult> SetInProduction() {
var hF = _xmlProvider.GetHamannFiles();
if (hF == null) {
@@ -334,31 +96,18 @@ public class APIController : Controller {
}
if (filename == null) {
ModelState.AddModelError("Error", "No filename given");
ModelState.AddModelError("Error", "Kein Dateiname.");
return BadRequest(ModelState);
}
var newFile = hF.Where(x => x.Name == filename);
if (newFile == null || !newFile.Any()) {
ModelState.AddModelError("Error", "Trying to set a unavailable file.");
ModelState.AddModelError("Error", "Versuch, auf eine unverfügbare Datei zuzugreifen.");
return BadRequest(ModelState);
}
_ = _lib.SetLibrary(newFile.First().PhysicalPath, ModelState);
_ = _lib.SetLibrary(newFile.First(), null, ModelState);
if (!ModelState.IsValid) return BadRequest(ModelState);
_xmlProvider.SetInProduction(newFile.First());
return Created("/", newFile.First());
}
[HttpPost]
[Route("API/SetYearSetting")]
[ValidateAntiForgeryToken]
[FeatureGate(Features.UploadService, Features.AdminService)]
public async Task<IActionResult>? SetEndYear(YearSetting startendyear) {
_lib.SetEndYear(startendyear.EndYear);
return Created("/", "");;
}
}

View File

@@ -18,6 +18,6 @@ public class AdminController : Controller {
[Route("Admin")]
[FeatureGate(Features.AdminService)]
public IActionResult Index() {
return Redirect("/Admin/Upload");
return Redirect("/Admin/XMLState");
}
}

View File

@@ -143,7 +143,7 @@ public class Briefecontroller : Controller {
}
private static List<(string Sender, string Receiver)> generateSendersRecievers(List<Person>? senders, List<Person>? receivers, bool generatePersonLinks) {
private static List<(string Sender, string Receiver)>? generateSendersRecievers(List<Person>? senders, List<Person>? receivers, bool generatePersonLinks) {
var res = new List<(string Sender, string Receiver)>();
if (senders == null || receivers == null) return null;
if (!generatePersonLinks) {

View File

@@ -13,11 +13,11 @@ namespace HaWeb.Controllers;
public class IndexController : Controller {
private IHaDocumentWrappper _lib;
private IReaderService _readerService;
private IXMLService _xmlService;
private IXMLInteractionService _xmlService;
private int _lettersForPage;
private int _endYear;
public IndexController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService service, IConfiguration config) {
public IndexController(IXMLFileProvider _, IHaDocumentWrappper lib, IReaderService readerService, IXMLInteractionService service, IConfiguration config) {
_lib = lib;
_readerService = readerService;
_xmlService = service;

View File

@@ -18,10 +18,10 @@ namespace HaWeb.Controllers;
public class SucheController : Controller {
private IHaDocumentWrappper _lib;
private IReaderService _readerService;
private IXMLService _xmlService;
private IXMLInteractionService _xmlService;
private int _lettersForPage;
public SucheController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService service, IConfiguration config) {
public SucheController(IHaDocumentWrappper lib, IReaderService readerService, IXMLInteractionService service, IConfiguration config) {
_lib = lib;
_readerService = readerService;
_xmlService = service;
@@ -206,7 +206,7 @@ public class SucheController : Controller {
string activeSearch,
SearchType ST,
SearchResultType SRT,
List<CommentModel> comments) {
List<CommentModel>? comments) {
// Model init & return
var model = new SucheViewModel(ST, SRT, null, 0, null, activeSearch, null, null, comments, null);
return View("~/Views/HKB/Dynamic/Suche.cshtml", model);

View File

@@ -1,107 +0,0 @@
namespace HaWeb.Controllers;
using Microsoft.AspNetCore.Mvc;
using HaDocument.Interfaces;
using HaXMLReader.Interfaces;
using Microsoft.FeatureManagement.Mvc;
using System.Runtime.InteropServices;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.Extensions.Configuration;
using HaWeb.Filters;
using HaWeb.XMLParser;
using HaWeb.Models;
using HaWeb.FileHelpers;
using Microsoft.AspNetCore.Mvc.Rendering;
// Admin UI controller for the upload & publish overview page: renders the
// uploadable XML root categories, the files stored for each, and the
// generated Hamann files.
public class UploadController : Controller {
    // DI
    private IHaDocumentWrappper _lib;
    private IReaderService _readerService;
    private readonly long _fileSizeLimit;
    private readonly string _targetFilePath;
    private readonly IXMLService _xmlService;
    private readonly IXMLProvider _xmlProvider;
    // Options
    private static readonly string[] _permittedExtensions = { ".xml" };
    private static readonly FormOptions _defaultFormOptions = new FormOptions();
    public UploadController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService xmlService, IXMLProvider xmlProvider, IConfiguration config) {
        _lib = lib;
        _readerService = readerService;
        _xmlService = xmlService;
        _xmlProvider = xmlProvider;
        _fileSizeLimit = config.GetValue<long>("FileSizeLimit");
        // Storage path for uploaded files is configured per operating system.
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) {
            _targetFilePath = config.GetValue<string>("StoredFilePathWindows");
        } else {
            _targetFilePath = config.GetValue<string>("StoredFilePathLinux");
        }
    }
    // GET Admin/Upload/{id?}. With an id: detail view for one root category
    // (including its available files); without: the general upload & publish page.
    [HttpGet]
    [Route("Admin/Upload/{id?}")]
    [FeatureGate(Features.AdminService)]
    [GenerateAntiforgeryTokenCookie]
    public IActionResult Index(string? id) {
        var library = _lib.GetLibrary();
        var roots = _xmlService.GetRootsList();
        if (roots == null) return error404();
        // Generated Hamann files, newest first, flagged when in production.
        var hF = _xmlProvider.GetHamannFiles();
        List<FileModel>? hamannFiles = null;
        if (hF != null)
            hamannFiles = hF
                .OrderByDescending(x => x.LastModified)
                .Select(x => new FileModel(x.Name, string.Empty, x.LastModified.LocalDateTime, false, x == _xmlProvider.GetInProduction()))
                .ToList();
        var uF = _xmlService.GetUsedDictionary();
        var pF = _xmlService.GetInProduction();
        // Per-prefix view models for the currently "used" files.
        Dictionary<string, List<FileModel>?>? usedFiles = null;
        if (uF != null) {
            usedFiles = new Dictionary<string, List<FileModel>?>();
            foreach (var kv in uF) {
                if (kv.Value == null) continue;
                usedFiles.Add(kv.Key, XMLFileHelpers.ToFileModel(kv.Value, pF, uF));
            }
        }
        // Per-prefix view models for the files currently in production.
        Dictionary<string, List<FileModel>?>? productionFiles = null;
        if (pF != null) {
            productionFiles = new Dictionary<string, List<FileModel>?>();
            foreach (var kv in pF) {
                if (kv.Value == null) continue;
                productionFiles.Add(kv.Key, XMLFileHelpers.ToFileModel(kv.Value, pF, uF));
            }
        }
        // Years selectable in the UI: union of included and excluded letter years, sorted.
        var availableYears = library.MetasByYear.Select(x => x.Key).Union(library.ExcludedMetasByYear.Select(x => x.Key)).ToList();
        availableYears.Sort();
        if (id != null) {
            id = id.ToLower();
            var root = _xmlService.GetRoot(id);
            if (root == null) return error404();
            var model = new UploadViewModel(root.Type, id, roots, usedFiles, _lib.GetStartYear(), _lib.GetEndYear(), availableYears);
            model.ProductionFiles = productionFiles;
            model.HamannFiles = hamannFiles;
            model.AvailableFiles = XMLFileHelpers.ToFileModel(_xmlProvider.GetFiles(id), pF, uF);
            return View("~/Views/Admin/Dynamic/Upload.cshtml", model);
        } else {
            var model = new UploadViewModel("Upload & Veröffentlichen", id, roots, usedFiles, _lib.GetStartYear(), _lib.GetEndYear(), availableYears);
            model.ProductionFiles = productionFiles;
            model.HamannFiles = hamannFiles;
            return View("~/Views/Admin/Dynamic/Upload.cshtml", model);
        }
    }
    // Sets a 404 status and redirects to the shared error page.
    private IActionResult error404() {
        Response.StatusCode = 404;
        return Redirect("/Error404");
    }
}

View File

@@ -0,0 +1,50 @@
namespace HaWeb.Controllers;
using Microsoft.AspNetCore.Mvc;
using Microsoft.FeatureManagement.Mvc;
using HaWeb.Filters;
using HaWeb.XMLParser;
using HaWeb.Models;
using HaWeb.FileHelpers;
using HaWeb.BackgroundTask;
// Admin page showing the state of the managed XML files: git data, root
// categories, generated Hamann files, and validation state.
[FeatureGate(Features.AdminService)]
public class XMLStateController : Controller {
    // DI
    private IHaDocumentWrappper _lib;
    private readonly IXMLInteractionService _xmlService;
    private readonly IXMLFileProvider _xmlProvider;
    private readonly IMonitorLoop _loop;
    public XMLStateController(IMonitorLoop loop, IHaDocumentWrappper lib, IXMLInteractionService xmlService, IXMLFileProvider xmlProvider) {
        _lib = lib;
        _xmlService = xmlService;
        _xmlProvider = xmlProvider;
        _loop = loop;
    }
    [HttpGet]
    [Route("Admin/XMLState/")]
    [FeatureGate(Features.AdminService)]
    [GenerateAntiforgeryTokenCookie]
    public IActionResult Index() {
        _loop.StartMonitorLoop();
        // FIX: the original bound GetLibrary() to a never-read local (dead
        // binding). The call itself is kept in case an implementation has
        // side effects — NOTE(review): the visible implementation just
        // returns a field, so the call could likely be dropped entirely.
        _ = _lib.GetLibrary();
        var roots = _xmlService.GetRootsList();
        if (roots == null) return error404();
        // Generated Hamann files, newest first.
        var hF = _xmlProvider.GetHamannFiles()?.OrderByDescending(x => x.LastModified).ToList();
        var mF = _xmlService.GetManagedFiles();
        var gD = _xmlProvider.GetGitData();
        var activeF = _lib.GetActiveFile();
        var vS = _xmlService.GetValidState();
        var model = new XMLStateViewModel("Dateiübersicht", gD, roots, hF, mF, vS) {
            ActiveFile = activeF,
        };
        return View("~/Views/Admin/Dynamic/XMLState.cshtml", model);
    }
    // Sets a 404 status and redirects to the shared error page.
    private IActionResult error404() {
        Response.StatusCode = 404;
        return Redirect("/Error404");
    }
}

View File

@@ -0,0 +1,85 @@
using System.Timers;
namespace HaWeb.FileHelpers;
// Watches a set of files for content changes (compared by SHA-1 hash) and,
// once a change is detected, reloads configuration-dependent services after
// a 5 second debounce delay.
public class ConfigurationMonitor {
    // One-shot debounce timer; non-null while a reload is pending.
    private System.Timers.Timer? _timer;
    // Snapshot of (path, SHA-1 hash) pairs from the last observed state.
    private (string, byte[])[]? _h;
    private IServiceProvider _serviceProvider;
    public ConfigurationMonitor(string[] paths, IServiceProvider services) {
        _h = _getHash(paths);
        _serviceProvider = services;
    }
    // Hashes every path; null when there is nothing to watch.
    private static (string, byte[])[]? _getHash(string[] paths) {
        if (paths == null || !paths.Any()) return null;
        var ret = new List<(string, byte[])>();
        foreach (var c in paths)
            ret.Add((c, _computeHash(c)));
        return ret.ToArray();
    }
    // True when both snapshots cover the same paths with identical hashes.
    private bool isEqual((string, byte[])[]? a, (string, byte[])[]? b) {
        if (a == null && b == null) return true;
        if (a == null || b == null) return false;
        if (a.Length != b.Length) return false;
        // BUGFIX: the original only reported inequality when a path present
        // in BOTH snapshots had differing hashes, so snapshots over disjoint
        // path sets (same count) compared as "equal". Now every entry must
        // have a same-path partner with an identical hash.
        foreach (var entry in a) {
            var match = b.FirstOrDefault(x => x.Item1 == entry.Item1);
            if (match.Item1 == null) return false; // no partner for this path
            if (!Enumerable.SequenceEqual(entry.Item2, match.Item2)) return false;
        }
        return true;
    }
    // Called by the file watcher; arms the debounce timer when content changed.
    public void InvokeChanged(string[] paths) {
        var h = _getHash(paths);
        if (_timer == null && !isEqual(h, _h)) {
            _h = h;
            _timer = new(5000) { AutoReset = false };
            // BUGFIX: subscribe before enabling so the tick cannot fire
            // (however unlikely at 5s) before the handler is attached.
            _timer.Elapsed += Action;
            _timer.Enabled = true;
        }
    }
    // Debounce callback: reloads configuration-dependent services in a fresh DI scope.
    private void Action(Object source, System.Timers.ElapsedEventArgs e) {
        Console.WriteLine("Configuration changed (ConfigurationMonitor Class)");
        using IServiceScope serviceScope = _serviceProvider.CreateScope();
        IServiceProvider provider = serviceScope.ServiceProvider;
        var cP = provider.GetRequiredService<IConfiguration>();
        var hP = provider.GetRequiredService<IHaDocumentWrappper>();
        hP.ParseConfiguration(cP);
        var fP = provider.GetRequiredService<IXMLFileProvider>();
        fP.Reload(cP);
        // _lifetime.StopApplication();
        // BUGFIX: dispose the one-shot timer instead of leaking it.
        _timer?.Dispose();
        _timer = null;
    }
    // Hashes a file with SHA-1, retrying up to three times with exponential
    // backoff to ride out transient IO errors (e.g. the file still being written).
    private static byte[] _computeHash(string filePath) {
        var runCount = 1;
        while (runCount < 4) {
            try {
                if (File.Exists(filePath)) {
                    using var fs = File.OpenRead(filePath);
                    return System.Security.Cryptography.SHA1
                        .Create().ComputeHash(fs);
                }
                // FileNotFoundException derives from IOException, so this is retried too.
                throw new FileNotFoundException();
            } catch (IOException) {
                if (runCount == 3)
                    throw;
                Thread.Sleep(TimeSpan.FromSeconds(Math.Pow(2, runCount)));
                runCount++;
            }
        }
        return new byte[20]; // unreachable; satisfies definite-return analysis
    }
}

View File

@@ -9,12 +9,13 @@ using HaXMLReader.Interfaces;
using HaWeb.SearchHelpers;
using HaWeb.XMLParser;
using System.Text;
using System.Xml.Linq;
using System.Diagnostics;
public class HaDocumentWrapper : IHaDocumentWrappper {
private ILibrary Library;
private IXMLProvider _xmlProvider;
private IXMLService _xmlService;
private string _filepath;
private IFileInfo _ActiveFile;
private ILibrary? Library;
private IXMLInteractionService _xmlService;
private int _startYear;
private int _endYear;
private List<Person>? _availablePersons;
@@ -22,23 +23,14 @@ public class HaDocumentWrapper : IHaDocumentWrappper {
// public List<SearchHelpers.CollectedItem>? SearchableLetters { get; private set; }
public HaDocumentWrapper(IXMLProvider xmlProvider, IXMLService service, IConfiguration configuration) {
_xmlProvider = xmlProvider;
public HaDocumentWrapper(IXMLInteractionService service, IConfiguration configuration) {
_xmlService = service;
ParseConfiguration(configuration);
}
public void ParseConfiguration(IConfiguration configuration) {
_startYear = configuration.GetValue<int>("AvailableStartYear");
_endYear = configuration.GetValue<int>("AvailableEndYear");
var filelist = xmlProvider.GetHamannFiles();
if (filelist != null && filelist.Any()) {
_AutoLoad(filelist);
}
// Use Fallback library
if (Library == null) {
var options = new HaWeb.Settings.HaDocumentOptions();
if (SetLibrary(options.HamannXMLFilePath) == null) {
throw new Exception("Die Fallback Hamann.xml unter " + options.HamannXMLFilePath + " kann nicht geparst werden.");
}
}
}
public List<Person>? GetAvailablePersons() => _availablePersons;
@@ -49,26 +41,29 @@ public class HaDocumentWrapper : IHaDocumentWrappper {
public int GetEndYear() => _endYear;
public void SetEndYear(int end) {
this._endYear = end;
SetLibrary(_filepath);
}
public IFileInfo GetActiveFile() => _ActiveFile;
public ILibrary? SetLibrary(string filepath, ModelStateDictionary? ModelState = null) {
// 1. Set ILibrary
public ILibrary? SetLibrary(IFileInfo? file, XDocument? doc, ModelStateDictionary? ModelState = null) {
// Handle null on file & doc
var path = file == null ? new HaWeb.Settings.HaDocumentOptions().HamannXMLFilePath : file.PhysicalPath;
if (doc == null) doc = XDocument.Load(path, LoadOptions.PreserveWhitespace);
// 1. Parse the Document, create search Index
if (_xmlService != null)
_xmlService.CreateSearchables(doc);
// 2. Set ILibrary
try {
Library = HaDocument.Document.Create(new HaWeb.Settings.HaDocumentOptions() { HamannXMLFilePath = filepath, AvailableYearRange = (_startYear, _endYear) });
Library = HaDocument.Document.Create(new HaWeb.Settings.HaDocumentOptions() { HamannXMLFilePath = path, AvailableYearRange = (_startYear, _endYear) }, doc.Root);
} catch (Exception ex) {
if (ModelState != null) ModelState.AddModelError("Error", "Das Dokument konnte nicht geparst werden: " + ex.Message);
return null;
}
// 1a. Set Available Persons
// 3a. Set Available Persons
var persons = Library.Metas.SelectMany(x => x.Value.Senders.Union(x.Value.Receivers)).Distinct();
_availablePersons = persons.Select(x => Library.Persons[x]).OrderBy(x => x.Surname).ThenBy(x => x.Prename).ToList();
// 1b. Setup a Dictionary with available Person overview Pages
// 3b. Setup a Dictionary with available Person overview Pages
_personsWithLetters = new Dictionary<string, Person>();
var availablePersonPages = Library.Persons.Where(x => !String.IsNullOrWhiteSpace(x.Value.Komm));
foreach (var p in availablePersonPages) {
@@ -77,30 +72,12 @@ public class HaDocumentWrapper : IHaDocumentWrappper {
}
}
// 2. Set Library in Production, collect some Objects
if (_xmlService != null)
_xmlService.SetInProduction(System.Xml.Linq.XDocument.Load(filepath, System.Xml.Linq.LoadOptions.PreserveWhitespace));
// 3. Set Filepath
_filepath = filepath;
// 4. Set info on loaded file
_ActiveFile = file;
return Library;
}
public ILibrary GetLibrary() {
public ILibrary? GetLibrary() {
return Library;
}
private void _AutoLoad(List<IFileInfo> files) {
var orderdlist = files.OrderByDescending(x => x.LastModified);
foreach (var item in orderdlist) {
if (SetLibrary(item.PhysicalPath) != null) {
_xmlProvider.SetInProduction(item);
return;
}
}
}
private string _prepareSearch(HaDocument.Interfaces.ISearchable objecttoseach) {
return SearchHelpers.StringHelpers.NormalizeWhiteSpace(objecttoseach.Element, ' ', false);
}
}

View File

@@ -0,0 +1,6 @@
namespace HaWeb.FileHelpers;
// Marker interface for the configuration-file monitor so the concrete
// ConfigurationMonitor can be registered and resolved through DI.
// NOTE(review): no members yet — presumably the surface will grow; confirm intent.
public interface IConfigurationMonitor
{
}

View File

@@ -3,14 +3,16 @@ using HaDocument.Interfaces;
using HaDocument.Models;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using HaXMLReader.Interfaces;
using Microsoft.Extensions.FileProviders;
using System.Xml.Linq;
public interface IHaDocumentWrappper {
public ILibrary? SetLibrary(string filepath, ModelStateDictionary ModelState);
public ILibrary GetLibrary();
public ILibrary? SetLibrary(IFileInfo? file, XDocument? doc, ModelStateDictionary? ModelState);
public ILibrary? GetLibrary();
public void ParseConfiguration(IConfiguration configuration);
public int GetStartYear();
public int GetEndYear();
public IFileInfo GetActiveFile();
public List<Person>? GetAvailablePersons();
public Dictionary<string, Person>? GetPersonsWithLetters();
public void SetEndYear(int end);
}

View File

@@ -0,0 +1,17 @@
namespace HaWeb.FileHelpers;
using Microsoft.Extensions.FileProviders;
using System.Xml.Linq;
using HaWeb.Models;
using Microsoft.AspNetCore.Mvc.ModelBinding;
// File-system and git access for the project's XML sources and the
// generated Hamann files.
public interface IXMLFileProvider {
    // Files in the git working tree, or null when unavailable.
    public List<IFileInfo>? GetWorkingTreeFiles();
    // Writes the merged Hamann XML below basefilepath; errors are reported via ModelState.
    public IFileInfo? SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary ModelState);
    // All generated Hamann files, or null when none exist.
    public List<IFileInfo>? GetHamannFiles();
    // Last git pull time and commit hash, or null when no git data is available.
    public (DateTime PullTime, string Hash)? GetGitData();
    // Re-reads configuration and re-initializes the provider.
    public void Reload(IConfiguration config);
    // Whether the underlying files changed — presumably since the last Scan(); confirm against implementation.
    public bool HasChanged();
    // Removes a generated Hamann file by name.
    public void DeleteHamannFile(string filename);
    // Rescans the file system for the current state.
    public void Scan();
}

View File

@@ -1,16 +0,0 @@
namespace HaWeb.FileHelpers;
using Microsoft.Extensions.FileProviders;
using System.Xml.Linq;
using HaWeb.Models;
using Microsoft.AspNetCore.Mvc.ModelBinding;
// Storage abstraction for uploaded XML root documents and generated Hamann files.
// (Removed in this commit in favor of IXMLFileProvider.)
public interface IXMLProvider {
    // Stored files for the given category prefix, or null when none exist.
    public FileList? GetFiles(string prefix);
    // Persists an uploaded root document below basefilepath; errors are reported via ModelState.
    public Task Save(XMLRootDocument doc, string basefilepath, ModelStateDictionary ModelState);
    // Writes a merged Hamann XML file; returns its file info, or null on failure.
    public Task<IFileInfo?> SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary ModelState);
    // All generated Hamann files, or null when none exist.
    public List<IFileInfo>? GetHamannFiles();
    // The Hamann file currently serving production, if any.
    public IFileInfo? GetInProduction();
    // Marks the given Hamann file as the production file.
    public void SetInProduction(IFileInfo info);
    // Removes a generated Hamann file by name.
    public void DeleteHamannFile(string filename);
}

View File

@@ -1,32 +0,0 @@
namespace HaWeb.FileHelpers;
using System.Xml.Linq;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using System.Text;
using System.Xml;
// Helper for parsing uploaded byte content into an XDocument.
public static class XDocumentFileHelper {
    // Reader settings: tolerant of fragments and unusual characters, skips
    // comments and processing instructions, preserves whitespace so the
    // document round-trips faithfully.
    private readonly static XmlReaderSettings _Settings = new XmlReaderSettings() {
        CloseInput = true,
        CheckCharacters = false,
        ConformanceLevel = ConformanceLevel.Fragment,
        IgnoreComments = true,
        IgnoreProcessingInstructions = true,
        IgnoreWhitespace = false
    };
    // Parses the given bytes as XML, preserving whitespace and line info.
    // Returns null (and records a model error) when the content is not well-formed.
    // FIX: was declared `async` with no `await` (CS1998) — all work is
    // synchronous, so it now returns a completed Task directly; awaiting
    // callers observe identical behavior.
    public static Task<XDocument?> ProcessStreamedFile(byte[] bytes, ModelStateDictionary modelState) {
        try {
            using (var stream = new MemoryStream(bytes)) {
                using (var xmlreader = XmlReader.Create(stream, _Settings)) {
                    return Task.FromResult<XDocument?>(XDocument.Load(xmlreader, LoadOptions.PreserveWhitespace | LoadOptions.SetLineInfo));
                }
            }
        } catch (Exception ex) {
            modelState.AddModelError("Error", $"Kein gültiges XML-Dokument geladen. Error: {ex.Message}");
        }
        return Task.FromResult<XDocument?>(null);
    }
}

View File

@@ -54,152 +54,54 @@ public static class XMLFileHelpers {
}
};
// Unused as of right now; used to take a file and do the same sanity checks as below
// public static async Task<byte[]> ProcessFormFile<T>(IFormFile formFile, ModelStateDictionary modelState, string[] permittedExtensions, long sizeLimit)
// {
// var fieldDisplayName = string.Empty;
// // Use reflection to obtain the display name for the model
// // property associated with this IFormFile. If a display
// // name isn't found, error messages simply won't show
// // a display name.
// MemberInfo property =
// typeof(T).GetProperty(
// formFile.Name.Substring(formFile.Name.IndexOf(".",
// StringComparison.Ordinal) + 1));
// if (property != null)
// {
// if (property.GetCustomAttribute(typeof(DisplayAttribute)) is
// DisplayAttribute displayAttribute)
// {
// fieldDisplayName = $"{displayAttribute.Name} ";
// }
// }
// // Don't trust the file name sent by the client. To display
// // the file name, HTML-encode the value.
// var trustedFileNameForDisplay = WebUtility.HtmlEncode(
// formFile.FileName);
// // Check the file length. This check doesn't catch files that only have
// // a BOM as their content.
// if (formFile.Length == 0)
// {
// modelState.AddModelError(formFile.Name,
// $"{fieldDisplayName}({trustedFileNameForDisplay}) is empty.");
// return Array.Empty<byte>();
// }
// if (formFile.Length > sizeLimit)
// {
// var megabyteSizeLimit = sizeLimit / 1048576;
// modelState.AddModelError(formFile.Name,
// $"{fieldDisplayName}({trustedFileNameForDisplay}) exceeds " +
// $"{megabyteSizeLimit:N1} MB.");
// return Array.Empty<byte>();
// }
// try
// {
// using (var memoryStream = new MemoryStream())
// {
// await formFile.CopyToAsync(memoryStream);
// // Check the content length in case the file's only
// // content was a BOM and the content is actually
// // empty after removing the BOM.
// if (memoryStream.Length == 0)
// {
// modelState.AddModelError(formFile.Name,
// $"{fieldDisplayName}({trustedFileNameForDisplay}) is empty.");
// }
// if (!IsValidFileExtensionAndSignature(
// formFile.FileName, memoryStream, permittedExtensions))
// {
// modelState.AddModelError(formFile.Name,
// $"{fieldDisplayName}({trustedFileNameForDisplay}) file " +
// "type isn't permitted or the file's signature " +
// "doesn't match the file's extension.");
// }
// else
// {
// return memoryStream.ToArray();
// }
// }
// }
// catch (Exception ex)
// {
// modelState.AddModelError(formFile.Name,
// $"{fieldDisplayName}({trustedFileNameForDisplay}) upload failed. " +
// $"Please contact the Help Desk for support. Error: {ex.HResult}");
// }
// return Array.Empty<byte>();
// public static List<FileModel>? ToFileModel(FileList? fileList) {
// if (fileList == null) return null;
// var fL = fileList.GetFileList();
// if (fL == null) return null;
// var ret = new List<FileModel>();
// foreach (var f in fL) {
// if (f.File == null) continue;
// ret.Add(ToFileModel(f));
// };
// return ret.OrderBy(x => x.LastModified).ToList();
// }
// Projects every backed document in the list to a FileModel, ordered by
// last-modified timestamp; returns null when there is no underlying list.
public static List<FileModel>? ToFileModel(FileList? fileList, Dictionary<string, FileList?>? productionFiles = null, Dictionary<string, FileList?>? usedFiles = null) {
    var documents = fileList?.GetFileList();
    if (documents == null) return null;
    return documents
        .Where(doc => doc.File != null)        // skip entries without a backing file
        .Select(doc => ToFileModel(doc, productionFiles, usedFiles))
        .OrderBy(model => model.LastModified)
        .ToList();
}
// // TODO: File State IsValid
// public static FileModel ToFileModel(XMLRootDocument document) {
// string id = document.Prefix;
// var model = new FileModel(document.FileName, document.File.LastModified.LocalDateTime, true) {
// Fields = document.Fields,
// Messages = document.GetLog(),
// Prefix = id
// };
// return model;
// }
// TODO: File State IsValid
/// <summary>
/// Converts a single <see cref="XMLRootDocument"/> into a <see cref="FileModel"/>,
/// flagging whether the document is currently in use and/or in production.
/// </summary>
/// <param name="document">The document to convert.</param>
/// <param name="productionFiles">Optional map (prefix -> FileList) of files currently in production.</param>
/// <param name="usedFiles">Optional map (prefix -> FileList) of files currently in use.</param>
public static FileModel ToFileModel(XMLRootDocument document, Dictionary<string, FileList?>? productionFiles = null, Dictionary<string, FileList?>? usedFiles = null) {
    string id = document.Prefix;
    // TryGetValue avoids the double lookup of ContainsKey + indexer and,
    // unlike the null-forgiving indexer access, tolerates a null FileList value.
    bool inProduction = productionFiles != null
        && productionFiles.TryGetValue(id, out var prodList)
        && prodList != null
        && prodList.Contains(document);
    bool isUsed = usedFiles != null
        && usedFiles.TryGetValue(id, out var usedList)
        && usedList != null
        && usedList.Contains(document);
    var model = new FileModel(document.FileName, document.Prefix, document.File.LastModified.LocalDateTime, isUsed, inProduction) { Fields = document.Fields };
    model.Messages = document.GetLog();
    return model;
}
public static async Task<byte[]?> ProcessStreamedFile(
MultipartSection section, ContentDispositionHeaderValue contentDisposition,
ModelStateDictionary modelState, string[] permittedExtensions, long sizeLimit) {
public static bool ProcessFile(
Stream file,
string fileName,
StringBuilder errorMessages,
string[] permittedExtensions,
long sizeLimit) {
try {
using (var memoryStream = new MemoryStream()) {
await section.Body.CopyToAsync(memoryStream);
// Check if the file is empty or exceeds the size limit.
if (memoryStream.Length == 0)
modelState.AddModelError("Error", "The file is empty.");
else if (memoryStream.Length > sizeLimit) {
var megabyteSizeLimit = sizeLimit / 1048576;
modelState.AddModelError("Error", $"The file exceeds {megabyteSizeLimit:N1} MB.");
}
// Check file extension and first bytes
else if (!IsValidFileExtensionAndSignature(contentDisposition.FileName.Value, memoryStream, permittedExtensions))
modelState.AddModelError("Error", "The file must be of the following specs:<br>" +
"1. The file must hava a .xml File-Extension<br>" +
"2. To make sure the file isn't executable the file must start with: <?xml version=\"1.0\" encoding=\"utf-8\"?> or <?xml version=\"1.0\"?>");
// Return the File as a byte array
else return memoryStream.ToArray();
// Check if the file is empty or exceeds the size limit.
if (file.Length == 0) {
errorMessages.AppendLine("Die Datei ist leer.");
return false;
}
else if (file.Length > sizeLimit) {
var megabyteSizeLimit = sizeLimit / 1048576;
errorMessages.AppendLine($"Die Datei überschreitet das Größenlimit {megabyteSizeLimit:N1} MB.");
return false;
}
} catch (Exception ex) {
modelState.AddModelError("Error", $"The upload failed. Error: {ex.Message}");
}
return null;
// Return orderly, if signature & extension okay
else return IsValidFileExtensionAndSignature(fileName, file, errorMessages, permittedExtensions);
} catch (Exception ex) {
errorMessages.AppendLine($"The upload failed. Error: {ex.Message}");
return false;
}
}
public static string? StreamToString(System.IO.Stream stream, ModelStateDictionary modelState) {
@@ -216,22 +118,26 @@ public static class XMLFileHelpers {
}
}
private static bool IsValidFileExtensionAndSignature(string fileName, Stream data, string[] permittedExtensions) {
private static bool IsValidFileExtensionAndSignature(string fileName, Stream data, StringBuilder errorMessages, string[] permittedExtensions) {
if (string.IsNullOrEmpty(fileName) || data == null || data.Length == 0)
return false;
var ext = Path.GetExtension(fileName).ToLowerInvariant();
if (string.IsNullOrEmpty(ext) || !permittedExtensions.Contains(ext))
if (string.IsNullOrEmpty(ext) || !permittedExtensions.Contains(ext)) {
errorMessages.AppendLine("Dateiname endet nicht auf .xml");
return false;
}
data.Position = 0;
using (var reader = new BinaryReader(data)) {
var signatures = _fileSignature[ext];
var headerBytes = reader.ReadBytes(signatures.Max(m => m.Length));
return signatures.Any(signature =>
headerBytes.Take(signature.Length).SequenceEqual(signature));
if (!signatures.Any(signature =>
headerBytes.Take(signature.Length).SequenceEqual(signature))) {
errorMessages.AppendLine("Datei muss mit <?xml version=\"1.0\" encoding=\"utf-8\"?> oder <?xml version=\"1.0\"?> beginnen.");
return false;
};
}
return true;
}
}

View File

@@ -0,0 +1,228 @@
namespace HaWeb.FileHelpers;
using Microsoft.Extensions.FileProviders;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using HaWeb.Models;
using HaWeb.XMLParser;
using HaWeb.XMLTests;
using System.Xml.Linq;
using System.Runtime.InteropServices;
using System.Diagnostics;
// XMLFileProvider provides a wrapper around the available XML data on a FILE basis:
// it reads source XML files from a git working tree, tracks generated "hamann_*.xml"
// output files, and (re-)loads the document library from the best available state.
public class XMLFileProvider : IXMLFileProvider {
    private readonly IHaDocumentWrappper _Lib;
    private readonly IXMLInteractionService _XMLService;
    private IFileProvider _hamannFileProvider;
    private IFileProvider _bareRepositoryFileProvider;
    private IFileProvider _workingTreeFileProvider;
    private string _Branch;
    private List<IFileInfo>? _WorkingTreeFiles;
    private List<IFileInfo>? _HamannFiles;
    // NOTE(review): static, so pull time / commit hash is shared across all instances —
    // confirm this provider is only ever registered as a singleton.
    private static (DateTime PullTime, string Hash)? _GitData;

    // Startup (LAST)
    public XMLFileProvider(IXMLInteractionService xmlservice, IHaDocumentWrappper _lib, IConfiguration config) {
        // TODO: Test Read / Write Access
        _Lib = _lib;
        _XMLService = xmlservice;
        _Initialize(config);
    }

    // Re-reads the configuration and reloads the library; same procedure as startup.
    public void Reload(IConfiguration config) {
        _Initialize(config);
    }

    // Shared startup / reload procedure (was duplicated verbatim in the constructor
    // and Reload): configure the file providers, scan the working tree, then load
    // the newest matching hamann file or create a fresh one.
    private void _Initialize(IConfiguration config) {
        _Branch = config.GetValue<string>("RepositoryBranch");
        _CreateFileProviders(config);
        // Create File Lists; here and in the xmlservice, which does preliminary checking
        Scan();
        if (_WorkingTreeFiles != null && _WorkingTreeFiles.Any()) {
            _XMLService.Collect(_WorkingTreeFiles);
        }
        _HamannFiles = _ScanHamannFiles();
        // Check if a hamann file already matches the current working tree status
        // -> YES: Load that file via _Lib.SetLibrary();
        if (_IsAlreadyParsed()) {
            _Lib.SetLibrary(_HamannFiles.First(), null, null);
            if (_Lib.GetLibrary() != null) return;
        }
        // -> NO: Try to create a new file
        var created = _XMLService.TryCreate();
        if (created != null) {
            var file = SaveHamannFile(created, _hamannFileProvider.GetFileInfo("./").PhysicalPath, null);
            if (file != null) {
                _Lib.SetLibrary(file, created.Document, null);
                if (_Lib.GetLibrary() != null) return;
            }
            // NOTE(review): if creation succeeded but saving/parsing failed, we fall
            // through here without trying the fallbacks below — TODO confirm intended.
        }
        // Creation failed entirely, so use the last best file:
        else if (_HamannFiles != null && _HamannFiles.Any()) {
            _Lib.SetLibrary(_HamannFiles.First(), null, null);
            if (_Lib.GetLibrary() != null) return;
        }
        // -> There is none? Use the fallback:
        else {
            var options = new HaWeb.Settings.HaDocumentOptions();
            if (_Lib.SetLibrary(null, null, null) == null) {
                throw new Exception("Die Fallback Hamann.xml unter " + options.HamannXMLFilePath + " kann nicht geparst werden.");
            }
        }
    }

    // Resolves the three physical file providers from platform-specific settings.
    private void _CreateFileProviders(IConfiguration config) {
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) {
            _hamannFileProvider = new PhysicalFileProvider(config.GetValue<string>("HamannFileStoreWindows"));
            _bareRepositoryFileProvider = new PhysicalFileProvider(config.GetValue<string>("BareRepositoryPathWindows"));
            _workingTreeFileProvider = new PhysicalFileProvider(config.GetValue<string>("WorkingTreePathWindows"));
        }
        else {
            _hamannFileProvider = new PhysicalFileProvider(config.GetValue<string>("HamannFileStoreLinux"));
            _bareRepositoryFileProvider = new PhysicalFileProvider(config.GetValue<string>("BareRepositoryPathLinux"));
            _workingTreeFileProvider = new PhysicalFileProvider(config.GetValue<string>("WorkingTreePathLinux"));
        }
    }

    // Getters and Setters
    public List<IFileInfo>? GetWorkingTreeFiles() => _WorkingTreeFiles;
    public (DateTime PullTime, string Hash)? GetGitData() => _GitData;
    public List<IFileInfo>? GetHamannFiles() => this._HamannFiles;

    // Functions
    // Deletes every cached hamann file with the given name, from disk and the cache.
    public void DeleteHamannFile(string filename) {
        if (_HamannFiles == null) return;
        var files = _HamannFiles.Where(x => x.Name == filename);
        foreach (var file in files) {
            File.Delete(file.PhysicalPath);
        }
        _HamannFiles.RemoveAll(x => x.Name == filename);
    }

    // Rescans the working tree and refreshes the cached git pull time / commit hash.
    public void Scan() {
        _WorkingTreeFiles = _ScanWorkingTreeFiles();
        _GitData = _ScanGitData();
    }

    /// <summary>
    /// Saves a generated hamann XElement under a name derived from the last git pull
    /// time and the short commit hash, then registers the file in the cache.
    /// Returns null (reporting via ModelState, if supplied) on failure.
    /// </summary>
    public IFileInfo? SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary? ModelState) {
        if (!_GitData.HasValue) return null;
        var filename = "hamann_" + _GitData.Value.PullTime.Year + "-" + _GitData.Value.PullTime.Month + "-" + _GitData.Value.PullTime.Day + "_" + _GitData.Value.PullTime.Hour + "-" + _GitData.Value.PullTime.Minute + "." + _GitData.Value.Hash.Substring(0, 7) + ".xml";
        var path = Path.Combine(basefilepath, filename);
        try {
            if (!Directory.Exists(basefilepath))
                Directory.CreateDirectory(basefilepath);
            using (var targetStream = System.IO.File.Create(path))
                element.Save(targetStream, SaveOptions.DisableFormatting);
        } catch (Exception ex) {
            if (ModelState != null) ModelState.AddModelError("Error", "Die Datei konnte nicht gespeichert werden: " + ex.Message);
            return null;
        }
        var info = _hamannFileProvider.GetFileInfo(filename);
        if (info == null) {
            if (ModelState != null) ModelState.AddModelError("Error", "Auf die neu erstellte Datei konnte nicht zugegriffen werden.");
            return null;
        }
        if (_HamannFiles == null) _HamannFiles = new List<IFileInfo>();
        // Replace any stale entry with the same name before adding.
        _HamannFiles.RemoveAll(x => x.Name == info.Name);
        _HamannFiles.Add(info);
        return info;
    }

    // True when the repository HEAD moved since the last scan; updates the cache if so.
    // Also true when no git data has been scanned yet.
    public bool HasChanged() {
        if (!_GitData.HasValue) return true;
        var current = _ScanGitData();
        if (current.Item2 != _GitData.Value.Hash) {
            _GitData = current;
            return true;
        }
        return false;
    }

    // Reads the commit hash of the configured branch straight from the bare repo's
    // ref file. Trim() strips the trailing newline git writes after the hash.
    // NOTE(review): assumes the ref is not packed (refs/heads/<branch> exists as a
    // loose file) — TODO confirm for this deployment.
    private (DateTime, string) _ScanGitData() {
        var head = _bareRepositoryFileProvider.GetFileInfo("refs/heads/" + _Branch);
        return (head.LastModified.DateTime, File.ReadAllText(head.PhysicalPath).Trim());
    }

    // Incomplete: obtains a change token for the branch ref but never registers a
    // callback; currently unused.
    private void _RegisterChangeCallbacks() {
        var cT = _bareRepositoryFileProvider.Watch("refs/heads/" + _Branch);
    }

    // Gets all XML files at the top level of the working tree.
    private List<IFileInfo>? _ScanWorkingTreeFiles() {
        var files = _workingTreeFileProvider.GetDirectoryContents(string.Empty)!.Where(x => !x.IsDirectory && x.Name.EndsWith(".xml"))!.ToList();
        return files;
    }

    // Gets all generated hamann*.xml files, newest first; null when there are none.
    private List<IFileInfo>? _ScanHamannFiles() {
        var files = _hamannFileProvider.GetDirectoryContents(string.Empty).Where(x => !x.IsDirectory && x.Name.StartsWith("hamann") && x.Name.EndsWith(".xml"));
        if (files == null || !files.Any()) return null;
        return files.OrderByDescending(x => x.LastModified).ToList();
    }

    // Extracts the short commit hash from "hamann_<date>.<hash>.xml"; null when the
    // filename does not match that three-part pattern.
    private string? _GetHashFromHamannFilename(string filename) {
        var s = filename.Split('.', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
        if (s.Length != 3 || s.Last() != "xml" || !s.First().StartsWith("hamann")) return null;
        return s[1];
    }

    // True when the newest hamann file was generated from the current HEAD commit
    // (compared via the 7-character short hash embedded in the filename).
    private bool _IsAlreadyParsed() {
        if (_HamannFiles == null || !_HamannFiles.Any() || !_GitData.HasValue) return false;
        var fhash = _GetHashFromHamannFilename(_HamannFiles.First().Name);
        var ghash = _GitData.Value.Hash.Substring(0, 7);
        return fhash == ghash;
    }
}

View File

@@ -1,146 +0,0 @@
namespace HaWeb.FileHelpers;
using Microsoft.Extensions.FileProviders;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using HaWeb.Models;
using HaWeb.XMLParser;
using HaWeb.XMLTests;
using System.Xml.Linq;
// XMLProvider provides a wrapper around the available XML data on a FILE basis:
// it scans a prefix-categorized directory tree for uploaded XML documents and
// tracks the generated "hamann" output files.
public class XMLProvider : IXMLProvider {
    private IFileProvider _fileProvider;
    // Map: prefix (category) -> files of that category; null until first scan succeeds.
    private Dictionary<string, FileList?>? _Files;
    private Dictionary<string, IXMLRoot>? _Roots;
    private List<IFileInfo>? _HamannFiles;
    // Stack of generated files pushed to production; the top is the active one.
    private Stack<IFileInfo>? _InProduction;

    public XMLProvider(IFileProvider provider, IXMLService xmlservice, IXMLTestService testService) {
        _fileProvider = provider;
        _Roots = xmlservice.GetRootsDictionary();
        _Files = _ScanFiles();
        _HamannFiles = _ScanHamannFiles();
        if (_Files != null)
            foreach (var category in _Files)
                if (category.Value != null)
                    xmlservice.AutoUse(category.Value);
        testService.Test();
    }

    public List<IFileInfo>? GetHamannFiles() => this._HamannFiles;

    // The file most recently set in production, or null when none has been set.
    public IFileInfo? GetInProduction() {
        if (_InProduction == null || !_InProduction.Any()) return null;
        return this._InProduction.Peek();
    }

    // Deletes every cached hamann file with the given name, from disk and the cache.
    public void DeleteHamannFile(string filename) {
        if (_HamannFiles == null) return;
        var files = _HamannFiles.Where(x => x.Name == filename);
        foreach (var file in files) {
            File.Delete(file.PhysicalPath);
        }
        _HamannFiles.RemoveAll(x => x.Name == filename);
    }

    public void SetInProduction(IFileInfo info) {
        if (_InProduction == null) _InProduction = new Stack<IFileInfo>();
        _InProduction.Push(info);
    }

    public FileList? GetFiles(string prefix)
        => _Files != null && _Files.ContainsKey(prefix) ? _Files[prefix] : null;

    // Saves a Document as file (under <basefilepath>/<prefix>/) and adds it to the
    // collection. Errors are reported via ModelState.
    public async Task Save(XMLRootDocument doc, string basefilepath, ModelStateDictionary ModelState) {
        var type = doc.Prefix;
        var directory = Path.Combine(basefilepath, type);
        var path = Path.Combine(directory, doc.FileName);
        try {
            if (!Directory.Exists(directory))
                Directory.CreateDirectory(directory);
            using (var targetStream = System.IO.File.Create(path))
                await doc.Save(targetStream, ModelState);
        } catch (Exception ex) {
            ModelState.AddModelError("Error", "Speichern der Datei fehlgeschlagen: " + ex.Message);
            return;
        }
        var info = _fileProvider.GetFileInfo(Path.Combine(doc.Prefix, doc.FileName));
        if (info == null) {
            ModelState.AddModelError("Error", "Auf die neu erstellte Datei konnte nicht zugegriffen werden.");
            return;
        }
        doc.File = info;
        if (_Files == null) _Files = new Dictionary<string, FileList?>();
        if (!_Files.ContainsKey(doc.Prefix)) _Files.Add(doc.Prefix, new FileList(doc.XMLRoot));
        _Files[doc.Prefix]!.Add(doc);
    }

    // Saves a generated hamann XElement under a date-stamped random name and
    // registers it in the cache. Returns null (reporting via ModelState) on failure.
    public async Task<IFileInfo?> SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary ModelState) {
        var date = DateTime.Now;
        var filename = "hamann_" + date.Year + "-" + date.Month + "-" + date.Day + "." + Path.GetRandomFileName() + ".xml";
        var directory = Path.Combine(basefilepath, "hamann");
        var path = Path.Combine(directory, filename);
        try {
            if (!Directory.Exists(directory))
                Directory.CreateDirectory(directory);
            using (var targetStream = System.IO.File.Create(path))
                await element.SaveAsync(targetStream, SaveOptions.DisableFormatting, new CancellationToken());
        } catch (Exception ex) {
            ModelState.AddModelError("Error", "Die Datei konnte nicht gespeichert werden: " + ex.Message);
            return null;
        }
        var info = _fileProvider.GetFileInfo(Path.Combine("hamann", filename));
        if (info == null) {
            ModelState.AddModelError("Error", "Auf die neu erstellte Datei konnte nicht zugegriffen werden.");
            return null;
        }
        if (_HamannFiles == null) _HamannFiles = new List<IFileInfo>();
        _HamannFiles.RemoveAll(x => x.Name == info.Name);
        _HamannFiles.Add(info);
        return info;
    }

    // Scans all top-level directories whose name matches a known root prefix.
    // (The redundant assignment to _Files inside the loop was removed: the result
    // is returned via `res` and the constructor assigns it to _Files anyway.)
    private Dictionary<string, FileList?>? _ScanFiles() {
        if (_Roots == null) return null;
        Dictionary<string, FileList?>? res = null;
        var dirs = _fileProvider.GetDirectoryContents(string.Empty).Where(x => x.IsDirectory);
        foreach (var dir in dirs) {
            if (_Roots.ContainsKey(dir.Name)) {
                if (res == null) res = new Dictionary<string, FileList?>();
                res.Add(dir.Name, _ScanFiles(dir.Name));
            }
        }
        return res;
    }

    // Scans one prefix directory for "<prefix>*.xml" files.
    private FileList? _ScanFiles(string prefix) {
        if (_Roots == null) return null;
        FileList? res = null;
        var files = _fileProvider.GetDirectoryContents(prefix).Where(x => !x.IsDirectory && x.Name.StartsWith(prefix) && x.Name.EndsWith(".xml"));
        foreach (var file in files) {
            if (_Roots == null || !_Roots.ContainsKey(prefix))
                throw new Exception("Attempting to read a File from an unrecognized Prefix: " + prefix);
            if (res == null) res = new FileList(_Roots[prefix]);
            res.Add(new XMLRootDocument(_Roots[prefix], file));
        }
        return res;
    }

    // Finds all generated hamann*.xml files under the "hamann" subdirectory.
    private List<IFileInfo>? _ScanHamannFiles() {
        var dir = _fileProvider.GetDirectoryContents(string.Empty).Where(x => x.IsDirectory && x.Name == "hamann");
        if (dir == null || !dir.Any()) return null;
        var files = _fileProvider.GetDirectoryContents(dir.First().Name).Where(x => !x.IsDirectory && x.Name.StartsWith("hamann") && x.Name.EndsWith(".xml"));
        if (files == null || !files.Any()) return null;
        return files.ToList();
    }
}

View File

@@ -17,8 +17,6 @@ public class FileList {
throw new Exception("Diese Liste kann nur Elemente des Typs " + XMLRoot.Prefix + " enthalten");
if (_Files == null) _Files = new HashSet<XMLRootDocument>();
var replacing = _Files.Where(x => x.FileName == document.FileName);
if (replacing != null && replacing.Any()) _Files.Remove(replacing.First());
_Files.Add(document);
}

View File

@@ -1,19 +1,46 @@
using System.Text;
using Microsoft.Extensions.FileProviders;
namespace HaWeb.Models;
public class FileModel {
public string FileName { get; private set; }
public string Prefix { get; private set; }
public DateTime LastModified { get; private set; }
public bool IsUsed { get; private set; }
public bool InProduction { get; private set; }
public List<(string, string?)>? Fields { get; set; }
public string? Messages { get; set; }
public IFileInfo File { get; private set; }
public FileModel(string name, string prefix, DateTime lastModified, bool isUsed, bool inProduction) {
// This affects only repo files
public bool IsValid { get; private set; } = false;
public List<XMLRootDocument>? Content { get; set; }
public List<(string, string?)>? Fields { get; set; }
public string? Prefix { get; set; }
private StringBuilder? _log;
public FileModel(string name, IFileInfo file) {
FileName = name;
IsUsed = isUsed;
LastModified = lastModified;
InProduction = inProduction;
Prefix = prefix;
File = file;
}
public string? GetLog() {
if (_log == null) return null;
return _log.ToString();
}
public void Log(string msg) {
if (_log == null) _log = new StringBuilder();
var prefix = DateTime.Now.ToShortTimeString() + " ";
if (File != null) prefix += File.Name + ": ";
_log.AppendLine(prefix + msg);
}
public void ResetLog() {
if (_log != null) _log.Clear();
}
public void Validate() {
IsValid = true;
}
public DateTime GetLastModified() {
return File.LastModified.ToLocalTime().DateTime;
}
}

View File

@@ -7,7 +7,7 @@ using System.Collections.Generic;
public class SearchResult {
public string Search { get; private set; }
public string Index { get; private set; }
public string Identifier { get; set; }
public string? Identifier { get; set; }
public string? Page { get; set; }
public string? Line { get; set; }
public string? Preview { get; set; }

View File

@@ -1,27 +0,0 @@
namespace HaWeb.Models;
using HaWeb.XMLParser;
using Microsoft.Extensions.FileProviders;
using Microsoft.AspNetCore.Mvc.Rendering;
// View model for the upload page: carries the active category (prefix), the
// known root types, the file collections to display and the selectable year range.
public class UploadViewModel {
    // Page title / currently active category.
    public string ActiveTitle { get; private set; }
    public string? Prefix { get; private set; }
    // Root element types that can be uploaded.
    public List<IXMLRoot>? AvailableRoots { get; private set; }
    public List<FileModel>? AvailableFiles { get; set; }
    // Files per prefix that are currently used / in production.
    public Dictionary<string, List<FileModel>?>? UsedFiles { get; private set; }
    public Dictionary<string, List<FileModel>?>? ProductionFiles { get; set; }
    // Selectable year range for filtering.
    public List<int> AvailableYears { get; private set; }
    public int StartYear { get; private set; }
    public int EndYear { get; private set; }
    public List<FileModel>? HamannFiles { get; set; }

    public UploadViewModel(string title, string? prefix, List<IXMLRoot>? roots, Dictionary<string, List<FileModel>?>? usedFiles, int startYear, int endYear, List<int> availableYears) {
        ActiveTitle = title;
        Prefix = prefix;
        AvailableRoots = roots;
        UsedFiles = usedFiles;
        AvailableYears = availableYears;
        StartYear = startYear;
        EndYear = endYear;
    }
}

View File

@@ -7,154 +7,26 @@ using HaWeb.XMLParser;
using System.Text;
public class XMLRootDocument {
private XElement? _Element;
private string? _filename;
private IFileInfo? _file;
private StringBuilder? _log;
[JsonIgnore]
public XElement? Element { get; private set; }
[JsonIgnore]
public IXMLRoot XMLRoot { get; private set; }
public FileModel File { get; private set; }
public string FileName {
get {
if (_filename == null)
_filename = _CreateFilename();
return _filename;
}
}
[JsonIgnore]
public IFileInfo? File {
get {
return _file;
}
set {
_file = value;
_Element = null;
} }
public string Prefix { get; private set; }
public DateTime Date { get; private set; }
// UNUSED AS OF NOW
public (string?, string?) IdentificationString { get; private set; }
// TODO: Fields
public List<(string, string?)>? Fields { get; set; }
// Entry point for file reading
public XMLRootDocument(IXMLRoot xmlRoot, IFileInfo file) {
XMLRoot = xmlRoot;
Prefix = xmlRoot.Prefix;
File = file;
Date = file.LastModified.LocalDateTime;
_filename = file.Name;
_GenerateFieldsFromFilename(file.Name);
}
// Entry point for XML upload reading
public XMLRootDocument(IXMLRoot xmlRoot, string prefix, (string?, string?) idString, XElement element) {
// Entry point for XML file reading
public XMLRootDocument(IXMLRoot xmlRoot, string prefix, (string?, string?) idString, XElement element, FileModel file) {
XMLRoot = xmlRoot;
Prefix = prefix;
IdentificationString = idString;
Date = DateTime.Now;
_Element = element;
File = file;
Element = element;
}
private string _CreateFilename() {
var filename = _removeInvalidChars(Prefix) + "_";
if (!String.IsNullOrWhiteSpace(IdentificationString.Item1)) {
var hash = IdentificationString.Item1.GetHashCode().ToString("X8");
filename += hash + "_";
}
if (!String.IsNullOrWhiteSpace(IdentificationString.Item2)) filename += _removeInvalidChars(IdentificationString.Item2) + "_";
filename += _removeInvalidChars(Date.Year.ToString() + "-" + Date.Month.ToString() + "-" + Date.Day.ToString()) + "." + Path.GetRandomFileName();
return filename + ".xml";
}
private string _removeInvalidChars(string? s) {
if (String.IsNullOrWhiteSpace(s)) return string.Empty;
foreach (var c in Path.GetInvalidFileNameChars()) {
s = s.Replace(c, '-');
}
s = s.Replace('_', '-');
return s;
}
private void _GenerateFieldsFromFilename(string filename) {
var split = filename.Split('_');
Prefix = split[0];
if (split.Length == 3) {
IdentificationString = (null, split[1]);
} else if (split.Length == 4) {
IdentificationString = (split[1], split[2]);
} else {
IdentificationString = (null, null);
}
}
public string? GetLog() {
if (_log == null) return null;
return _log.ToString();
}
public void Log(string msg) {
if (_log == null) _log = new StringBuilder();
var prefix = DateTime.Now.ToShortTimeString() + " ";
if (File != null) prefix += File.Name + ": ";
_log.AppendLine(prefix + msg);
}
public void ResetLog() {
if (_log != null) _log.Clear();
}
// Call on UnUse to prevent memory hogging
public void UnUse() {
_Element = null;
_log = null;
}
public XElement GetElement() {
if (_Element == null)
_Element = _GetElement();
return _Element;
}
private XElement _GetElement() {
if (File == null || String.IsNullOrWhiteSpace(File.PhysicalPath) || !File.Exists)
throw new Exception("Es ist kein Pfad für die XML-Datei vorhanden.");
if (XMLRoot == null)
throw new Exception("Kein gültiges Hamann-Dokument: " + File.PhysicalPath + "Vom Prefix: " + Prefix);
XDocument? doc = null;
try {
doc = XDocument.Load(File.PhysicalPath, LoadOptions.PreserveWhitespace | LoadOptions.SetLineInfo);
} catch (Exception ex) {
throw new Exception("Fehler beim Lesen des Dokuments: " + ex.Message);
}
if (doc == null || doc.Root == null)
throw new Exception("Das Dokument ist ungültig und kann nicht gelesen werden: " + File.PhysicalPath);
var element = XMLRoot.IsTypeOf(doc.Root);
if (element == null || !element.Any())
throw new Exception("Kein gültiges Hamann-Dokument: " + File.PhysicalPath + "Vom Prefix: " + Prefix);
return element.First();
}
public async Task Save(Stream stream, ModelStateDictionary state) {
if (XMLRoot == null) {
state.AddModelError("Error", "No corresponding Root Element found.");
return;
}
if (_Element == null) {
if (File == null) {
state.AddModelError("Error", "There is neither a file nor a saved element for this Document aborting the save.");
return;
}
_Element = GetElement();
}
await XMLRoot.CreateHamannDocument(_Element).SaveAsync(stream, SaveOptions.DisableFormatting, new CancellationToken());
}
}

View File

@@ -0,0 +1,36 @@
namespace HaWeb.Models;
using HaWeb.XMLParser;
using Microsoft.Extensions.FileProviders;
using Microsoft.AspNetCore.Mvc.Rendering;
// View model for the XML state page: shows the active file, repository state
// (pull time / commit hash), the known root types and the managed file lists.
public class XMLStateViewModel {
    // Titel der Seite / Aktiver Präfix
    public string ActiveTitle { get; private set; }
    public IFileInfo? ActiveFile { get; set; }
    // Last git pull time and commit hash, when available.
    public (DateTime PullTime, string Hash)? GitData { get; private set; }
    public bool ValidState { get; private set; }
    // Verfügbare Datei-Typen
    public List<IXMLRoot>? AvailableRoots { get; private set; }
    // Akuell geladene Dateien
    public List<FileModel>? ManagedFiles { get; private set; }
    // Verfügbare (Gesamt-)Dateien
    public List<IFileInfo>? HamannFiles { get; set; }

    public XMLStateViewModel(
        string title,
        (DateTime PullTime, string Hash)? gitData,
        List<IXMLRoot>? roots,
        List<IFileInfo>? hamannFiles,
        List<FileModel>? managedFiles,
        bool validState) {
        ActiveTitle = title;
        GitData = gitData;
        AvailableRoots = roots;
        HamannFiles = hamannFiles;
        ManagedFiles = managedFiles;
        ValidState = validState;
    }
}

View File

@@ -4,42 +4,64 @@ using HaDocument.Interfaces;
using HaWeb.XMLParser;
using HaWeb.XMLTests;
using HaWeb.FileHelpers;
using HaWeb.BackgroundTask;
using Microsoft.FeatureManagement;
using System.Runtime.InteropServices;
using Microsoft.Extensions.FileProviders;
using Microsoft.AspNetCore.HttpOverrides;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Primitives;
var builder = WebApplication.CreateBuilder(args);
List<string> configpaths = new List<string>();
// Add additional configuration
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) {
var p = builder.Configuration.GetValue<string>("WorkingTreePathWindows") + "settings.json";
configpaths.Add(p);
builder.Configuration.AddJsonFile(p, optional: true, reloadOnChange: true);
} else {
var p = builder.Configuration.GetValue<string>("WorkingTreePathLinux") + "settings.json";
configpaths.Add(p);
builder.Configuration.AddJsonFile(p, optional: true, reloadOnChange: true);
}
// Create initial Data
var tS = new XMLTestService();
var XMLIS = new XMLInteractionService(builder.Configuration, tS);
var hdW = new HaDocumentWrapper(XMLIS, builder.Configuration);
var XMLFP = new XMLFileProvider(XMLIS, hdW, builder.Configuration);
// Add services to the container.
builder.Services.AddControllersWithViews();
builder.Services.AddHttpContextAccessor();
// // To get files from a path provided by configuration:
// TODO: Test Read / Write Access
string? filepath = null;
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) {
filepath = builder.Configuration.GetValue<string>("StoredFilePathWindows");
}
else {
filepath = builder.Configuration.GetValue<string>("StoredFilePathLinux");
}
if (filepath == null) {
throw new Exception("You need to set a specific Filepath, either StoredFilePathWindows or StoredFilePathLinux");
}
var physicalProvider = new PhysicalFileProvider(filepath);
builder.Services.AddSingleton<IFileProvider>(physicalProvider);
builder.Services.AddTransient<IReaderService, ReaderService>();
builder.Services.AddSingleton<IXMLProvider, XMLProvider>();
builder.Services.AddSingleton<IXMLService, XMLService>();
builder.Services.AddSingleton<HaWeb.FileHelpers.IHaDocumentWrappper, HaWeb.FileHelpers.HaDocumentWrapper>();
builder.Services.AddSingleton<IXMLTestService, XMLTestService>();
builder.Services.AddSingleton<IXMLTestService, XMLTestService>((_) => tS);
builder.Services.AddSingleton<IXMLInteractionService, XMLInteractionService>((_) => XMLIS);
builder.Services.AddSingleton<IHaDocumentWrappper, HaDocumentWrapper>((_) => hdW);
builder.Services.AddSingleton<IXMLFileProvider, XMLFileProvider>(_ => XMLFP);
// builder.Services.AddSingleton<IConfigurationMonitor, ConfigurationMonitor>();
// builder.Services.AddHostedService<QueuedHostedService>();
// builder.Services.AddSingleton<IBackgroundTaskQueue>(ctx =>
// {
// if (!int.TryParse(builder.Configuration["QueueCapacity"], out var queueCapacity))
// queueCapacity = 100;
// return new BackgroundTaskQueue(queueCapacity);
// });
// builder.Services.AddSingleton<IMonitorLoop, MonitorLoop>();
builder.Services.AddFeatureManagement();
var app = builder.Build();
// Reload config on change
var cM = new ConfigurationMonitor(configpaths.ToArray(), app.Services);
ChangeToken.OnChange(
() => app.Configuration.GetReloadToken(),
(state) => cM.InvokeChanged(state),
configpaths.ToArray()
);
// Configure the HTTP request pipeline.
if (!app.Environment.IsDevelopment())
{
@@ -47,6 +69,7 @@ if (!app.Environment.IsDevelopment())
// The default HSTS value is 30 days. You may want to change this for production scenarios, see https://aka.ms/aspnetcore-hsts.
app.UseHsts();
app.UseHttpsRedirection();
app.UseForwardedHeaders(new ForwardedHeadersOptions{ForwardedHeaders = ForwardedHeaders.XForwardedFor | ForwardedHeaders.XForwardedProto});
}
app.UseAuthorization();

View File

@@ -35,7 +35,7 @@ public class SearchRules {
if (sb.Length >= sw.Length) {
if (sb.ToString().Contains(sw)) {
if (reader.State.Results == null)
reader.State.Results = new List<(string Page, string Line, string Identifier)>();
reader.State.Results = new List<(string Page, string Line, string? Identifier)>();
reader.State.Results.Add((reader.CurrentPage, reader.CurrentLine, reader.State.CurrentIdentifier));
}
sb.Remove(0, sb.Length - sw.Length);

View File

@@ -7,7 +7,7 @@ public class SearchState : HaWeb.HTMLParser.IState {
internal string? CurrentIdentifier;
internal ILibrary? Lib;
internal bool Normalize;
internal List<(string Page, string Line, string Identifier)>? Results;
internal List<(string Page, string Line, string? Identifier)>? Results;
public SearchState(string searchword, bool normalize = false, ILibrary? lib = null) {
Lib = lib;

View File

@@ -6,11 +6,14 @@ using HaWeb.XMLTests;
public class AppNode : INodeRule
{
public string Name => "app";
public string XPath => "//app";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "ueberlieferung" },
XPath = "//app"
};
public string[]? Attributes { get; } = { "ref" };
public string? uniquenessAttribute => null;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
("ref", "//appDef", "index")
("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//appDef" }, "index")
};
}

View File

@@ -6,10 +6,13 @@ using HaWeb.XMLTests;
public class AutopsicNode : INodeRule
{
public string Name => "autopsic";
public string XPath => "//autopsic";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "metadaten" },
XPath = "//autopsic"
};
public string[]? Attributes { get; } = { "value" };
public string? uniquenessAttribute => "value" ;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
};
}

View File

@@ -6,11 +6,14 @@ using HaWeb.XMLTests;
public class EditNode : INodeRule
{
public string Name => "edit";
public string XPath => "//edit";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "brieftext", "texteingriffe", "ueberlieferung" },
XPath = "//edit"
};
public string[]? Attributes { get; } = { "ref" };
public string? uniquenessAttribute => null;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
("ref", "//editreason", "index")
("ref", new HamannXPath() { Documents = new[] { "texteingriffe" }, XPath = "//editreason" }, "index")
};
}

View File

@@ -6,11 +6,14 @@ using HaWeb.XMLTests;
public class HandNode : INodeRule
{
public string Name => "hand";
public string XPath => "//hand";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "ueberlieferung", "brieftext", "texteingriffe" },
XPath = "//hand"
};
public string[]? Attributes { get; } = { "ref" };
public string? uniquenessAttribute => null;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
("ref", "//handDef", "index")
("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//handDef" }, "index")
};
}

View File

@@ -6,10 +6,13 @@ using HaWeb.XMLTests;
public class KommentarNode : INodeRule
{
public string Name => "kommentar";
public string XPath => "//kommentar";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "register" },
XPath = "//kommentar"
};
public string[]? Attributes { get; } = { "id" };
public string? uniquenessAttribute => "id" ;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
};
}

View File

@@ -6,10 +6,13 @@ using HaWeb.XMLTests;
public class LetterDescNode : INodeRule
{
public string Name => "letterDesc";
public string XPath => "//letterDesc";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "metadaten" },
XPath = "//letterDesc"
};
public string[]? Attributes { get; } = { "ref" };
public string? uniquenessAttribute => "ref" ;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
};
}

View File

@@ -6,10 +6,13 @@ using HaWeb.XMLTests;
public class LetterTextNode : INodeRule
{
public string Name => "letterText";
public string XPath => "//letterText";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "brieftext" },
XPath = "//letterText"
};
public string[]? Attributes { get; } = { "index" };
public string? uniquenessAttribute => "index" ;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
};
}

View File

@@ -6,10 +6,13 @@ using HaWeb.XMLTests;
public class LetterTraditionNode : INodeRule
{
public string Name => "letterTradition";
public string XPath => "//letterTradition";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "ueberlieferung" },
XPath = "//letterTradition"
};
public string[]? Attributes { get; } = { "ref" };
public string? uniquenessAttribute => "ref" ;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
};
}

View File

@@ -6,12 +6,15 @@ using HaWeb.XMLTests;
public class LinkNode : INodeRule
{
public string Name => "link";
public string XPath => "//link";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "ueberlieferung", "stellenkommentar", "register", "texteingriffe" },
XPath = "//link"
};
public string[]? Attributes { get; } = null;
public string? uniquenessAttribute => null;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
("ref", "//kommentar", "id"),
("subref", "//subsection", "id")
("ref", new HamannXPath() { Documents = new[] { "register" }, XPath = "//kommentar" }, "id"),
("subref", new HamannXPath() { Documents = new[] { "register" }, XPath = "//subsection" }, "id"),
};
}

View File

@@ -6,10 +6,13 @@ using HaWeb.XMLTests;
public class MarginalNode : INodeRule
{
public string Name => "marginal";
public string XPath => "//marginal";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "stellenkommentar" },
XPath = "//marginal"
};
public string[]? Attributes { get; } = { "index", "letter", "page", "line" };
public string? uniquenessAttribute => "index";
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
};
}

View File

@@ -6,11 +6,14 @@ using HaWeb.XMLTests;
public class Receiver : INodeRule
{
public string Name => "receiver";
public string XPath => "//receiver";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "metadaten" },
XPath = "//receiver"
};
public string[]? Attributes { get; } = { "ref" };
public string? uniquenessAttribute => null;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
("ref", "//personDef", "index")
("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//personDef" }, "index")
};
}

View File

@@ -6,11 +6,14 @@ using HaWeb.XMLTests;
public class SenderNode : INodeRule
{
public string Name => "sender";
public string XPath => "//sender";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "metadaten" },
XPath = "//sender"
};
public string[]? Attributes { get; } = { "ref" };
public string? uniquenessAttribute => null;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
("ref", "//personDef", "index")
("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//personDef" }, "index")
};
}

View File

@@ -5,8 +5,15 @@ namespace HaWeb.Settings.NodeRules;
public class StructureCollection : ICollectionRule {
public string Name { get; } = "structure";
public string[] Bases { get; } = { "//letterText", "//letterTradition" };
public string[] Backlinks { get; } = { "//intlink", "//marginal" };
public HamannXPath[] Bases { get; } = {
new HamannXPath() { Documents = new[] { "brieftext" }, XPath = "//letterText" },
new HamannXPath() { Documents = new[] { "ueberlieferung" }, XPath = "//letterTradition"}
};
public HamannXPath[] Backlinks { get; } = {
new HamannXPath() { Documents = new[] { "stellenkommentar", "ueberlieferung", "texteingriffe", "register" }, XPath = "//intlink" },
new HamannXPath() { Documents = new[] { "stellenkommentar" }, XPath = "//marginal"}
};
public IEnumerable<(string, XElement, XMLRootDocument)> GenerateIdentificationStrings(IEnumerable<(XElement, XMLRootDocument)> list) {
foreach (var e in list) {
var id = e.Item1.Name == "letterText" ? e.Item1.Attribute("index")!.Value : e.Item1.Attribute("ref")!.Value;

View File

@@ -6,10 +6,13 @@ using HaWeb.XMLTests;
public class SubsectionNode : INodeRule
{
public string Name => "subsection";
public string XPath => "//subsection";
public HamannXPath XPath => new HamannXPath() {
Documents = new[] { "register" },
XPath = "//subsection"
};
public string[]? Attributes { get; } = { "id" };
public string? uniquenessAttribute => "id" ;
public List<(string, string, string)>? References { get; } = new List<(string, string, string)>()
public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>()
{
};
}

View File

@@ -6,7 +6,7 @@ using HaWeb.XMLParser;
public class CommentRoot : HaWeb.XMLParser.IXMLRoot {
public string Type { get; } = "Register";
public string Prefix { get; } = "register";
public string[] XPathContainer { get; } = { ".//data//kommentare/kommcat", ".//kommentare/kommcat" };
public string[] XPathContainer { get; } = { "/opus/data//kommentare/kommcat", "/opus//kommentare/kommcat" };
public Predicate<XElement> IsCollectedObject { get; } = (elem) => {
if (elem.Name == "kommentar") return true;
@@ -39,7 +39,7 @@ public class CommentRoot : HaWeb.XMLParser.IXMLRoot {
public void MergeIntoFile(XElement file, XMLRootDocument document) {
if (file.Element("kommentare") == null)
file.AddFirst(new XElement("kommentare"));
file.Element("kommentare")!.AddFirst(document.GetElement());
file.Element("kommentare")!.AddFirst(document.Element);
}
}

View File

@@ -6,7 +6,7 @@ using HaWeb.XMLParser;
public class DescriptionsRoot : HaWeb.XMLParser.IXMLRoot {
public string Type { get; } = "Metadaten";
public string Prefix { get; } = "metadaten";
public string[] XPathContainer { get; } = { ".//data/descriptions", ".//descriptions" };
public string[] XPathContainer { get; } = { "/opus/data/descriptions", "/opus/descriptions" };
public Predicate<XElement> IsCollectedObject { get; } = (elem) => {
if (elem.Name == "letterDesc") return true;
@@ -34,7 +34,7 @@ public class DescriptionsRoot : HaWeb.XMLParser.IXMLRoot {
public void MergeIntoFile(XElement file, XMLRootDocument document) {
if (file.Element("descriptions") == null)
file.AddFirst(new XElement("descriptions"));
var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x));
var elements = document.Element.Elements().Where(x => IsCollectedObject(x));
var root = file.Element("descriptions");
foreach (var element in elements) {
root!.Add(element);

View File

@@ -7,7 +7,7 @@ using System.IO;
public class DocumentRoot : HaWeb.XMLParser.IXMLRoot {
public string Type { get; } = "Brieftext";
public string Prefix { get; } = "brieftext";
public string[] XPathContainer { get; } = { ".//data/document", ".//document" };
public string[] XPathContainer { get; } = { "/opus/data/document", "/opus/document" };
public Predicate<XElement> IsCollectedObject { get; } = (elem) => {
if (elem.Name == "letterText") return true;
@@ -35,7 +35,7 @@ public class DocumentRoot : HaWeb.XMLParser.IXMLRoot {
public void MergeIntoFile(XElement file, XMLRootDocument document) {
if (file.Element("document") == null)
file.AddFirst(new XElement("document"));
var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x));
var elements = document.Element.Elements().Where(x => IsCollectedObject(x));
var root = file.Element("document");
foreach (var element in elements) {
root!.Add(element);

View File

@@ -6,7 +6,7 @@ using HaWeb.XMLParser;
public class EditsRoot : HaWeb.XMLParser.IXMLRoot {
public string Type { get; } = "Texteingriffe";
public string Prefix { get; } = "texteingriffe";
public string[] XPathContainer { get; } = { ".//data/edits", ".//edits" };
public string[] XPathContainer { get; } = { "/opus/data/edits", "/opus/edits" };
public Predicate<XElement> IsCollectedObject { get; } = (elem) => {
if (elem.Name == "editreason") return true;
@@ -34,7 +34,7 @@ public class EditsRoot : HaWeb.XMLParser.IXMLRoot {
public void MergeIntoFile(XElement file, XMLRootDocument document) {
if (file.Element("edits") == null)
file.AddFirst(new XElement("edits"));
var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x));
var elements = document.Element.Elements().Where(x => IsCollectedObject(x));
var root = file.Element("edits");
foreach (var element in elements) {
root!.Add(element);

View File

@@ -6,7 +6,7 @@ using HaWeb.XMLParser;
public class MarginalsRoot : HaWeb.XMLParser.IXMLRoot {
public string Type { get; } = "Stellenkommentar";
public string Prefix { get; } = "stellenkommentar";
public string[] XPathContainer { get; } = { ".//data/marginalien", ".//marginalien" };
public string[] XPathContainer { get; } = { "/opus/data/marginalien", "/opus/marginalien" };
public Predicate<XElement> IsCollectedObject { get; } = (elem) => {
if (elem.Name == "marginal") return true;
@@ -34,7 +34,7 @@ public class MarginalsRoot : HaWeb.XMLParser.IXMLRoot {
public void MergeIntoFile(XElement file, XMLRootDocument document) {
if (file.Element("marginalien") == null)
file.AddFirst(new XElement("marginalien"));
var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x));
var elements = document.Element.Elements().Where(x => IsCollectedObject(x));
var root = file.Element("marginalien");
foreach (var element in elements) {
root!.Add(element);

View File

@@ -6,7 +6,7 @@ using HaWeb.XMLParser;
public class ReferencesRoot : HaWeb.XMLParser.IXMLRoot {
public string Type { get; } = "Personen / Orte";
public string Prefix { get; } = "personenorte";
public string[] XPathContainer { get; } = { ".//data/definitions", ".//definitions" };
public string[] XPathContainer { get; } = { "/opus/data/definitions", "/opus/definitions" };
public Predicate<XElement> IsCollectedObject { get; } = (elem) => {
if (elem.Name == "personDefs" || elem.Name == "structureDefs" || elem.Name == "handDefs" || elem.Name == "locationDefs" || elem.Name == "appDefs")
@@ -35,7 +35,7 @@ public class ReferencesRoot : HaWeb.XMLParser.IXMLRoot {
public void MergeIntoFile(XElement file, XMLRootDocument document) {
if (file.Element("definitions") == null)
file.AddFirst(new XElement("definitions"));
var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x));
var elements = document.Element.Elements().Where(x => IsCollectedObject(x));
var root = file.Element("definitions");
foreach (var element in elements) {
root!.Add(element);

View File

@@ -6,7 +6,7 @@ using HaWeb.XMLParser;
public class TraditionsRoot : HaWeb.XMLParser.IXMLRoot {
public string Type { get; } = "Überlieferung";
public string Prefix { get; } = "ueberlieferung";
public string[] XPathContainer { get; } = { ".//data/traditions", ".//traditions" };
public string[] XPathContainer { get; } = { "/opus/data/traditions", "/opus/traditions" };
public Predicate<XElement> IsCollectedObject { get; } = (elem) => {
if (elem.Name == "letterTradition") return true;
@@ -34,7 +34,7 @@ public class TraditionsRoot : HaWeb.XMLParser.IXMLRoot {
public void MergeIntoFile(XElement file, XMLRootDocument document) {
if (file.Element("traditions") == null)
file.AddFirst(new XElement("traditions"));
var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x));
var elements = document.Element.Elements().Where(x => IsCollectedObject(x));
var root = file.Element("traditions");
foreach (var element in elements) {
root!.Add(element);

View File

@@ -1,104 +0,0 @@
@model UploadViewModel;
@{
ViewData["Title"] = "Upload & Veröffentlichen";
ViewData["SEODescription"] = "Johann Georg Hamann: Kommentierte Briefausgabe, Hg. v. Leonard Keidel und Janina Reibold. Durchsuchbare Online-Ausgabe der Briefe von und an Johann Georg Hamann.";
ViewData["showCredits"] = "false";
}
<script defer src="/js/upload.js" asp-append-version="true"></script>
<div class="ha-adminuploadfields" id="ha-adminuploadfields">
@foreach (var item in Model.AvailableRoots!.OrderBy(x => x.Type)) {
<a class="ha-uploadfield" asp-controller="Upload" asp-action="Index" asp-route-id="@item.Prefix">
<div class="ha-uploadfieldname">@item.Type</div>
@if (Model.UsedFiles != null && Model.UsedFiles.ContainsKey(item.Prefix)) {
<div class="ha-uploadusedfiles">
@foreach(var file in Model.UsedFiles[item.Prefix]!) {
@if (file == Model.UsedFiles[item.Prefix]!.Last())
{
<span class="ha-uploadusedfile">@file.FileName</span>
}
else
{
<span class="ha-uploadusedfile">@file.FileName;</span>
}
}
</div>
}
else {
<div class="ha-uploadusedfiles ha-uploadusedfilesnotfound">Keine Datei geladen!</div>
}
</a>
}
<div class="ha-uploadpublishforms">
@await Html.PartialAsync("/Views/Shared/_UploadForm.cshtml", Model)
<a class="ha-publishbutton" asp-controller="Upload" asp-action="Index" asp-route-id="@string.Empty">
<div class="ha-publishtext">Veröffentlichen</div>
</a>
</div>
</div>
<div class="ha-uploadheader">
<h1 class="ha-uploadtitle">@Model.ActiveTitle</h1>
</div>
<div class="ha-uploadcontainer">
@* File Category Page File List *@
@if (Model.AvailableFiles != null && Model.AvailableFiles.Any()) {
<div class="ha-filesheader">
<div class="ha-availablefiles" id="ha-availablefiles">
<div class="ha-availablefilestitle">Datei(en)</div>
@if(Model.UsedFiles != null && Model.UsedFiles.ContainsKey(Model.Prefix)) {
<div class="ha-usedfilelist">
@foreach (var item in Model.UsedFiles[Model.Prefix]!)
{
if(item == Model.UsedFiles[Model.Prefix]!.Last()) {
<span class="ha-usedfile">@item.FileName</span>
}
else {
<span class="ha-usedfile">@item.FileName,</span>
}
}
</div>
}
</div>
<div class="ha-availablefileslist hidden" id="ha-availablefileslist">
@await Html.PartialAsync("/Views/Shared/_FileListForm.cshtml", (Model.AvailableFiles, "Verfügbare Dateien", "API", "SetUsed", Model.Prefix, "/Download/XML/" + Model.Prefix + "/", true))
</div>
</div>
@if (Model.UsedFiles != null && Model.Prefix != null && Model.UsedFiles.ContainsKey(Model.Prefix)) {
<textarea class="py-2 px-3 mx-8 mb-8 shadow-lg font-mono text-sm border rounded" id="errormessagebox" name="errormessagebox" rows="25" cols="90" readonly>
@foreach (var f in Model.UsedFiles[Model.Prefix])
{
@f.Messages
}
</textarea>
}
}
@* Start Page File List *@
else {
<div class="ha-publishfilelist">
@await Html.PartialAsync("/Views/Shared/_PublishForm.cshtml", Model)
</div>
<div class="ha-hamannfilechooser">
@await Html.PartialAsync("/Views/Shared/_FileListForm.cshtml", (Model.HamannFiles, "Verfügbare Hamann-Dateien", "API", "SetInProduction", string.Empty, "/Download/XML/", false))
</div>
<form class="ha-setendyearform" id="ha-setendyearform" enctype="application/x-www-form-urlencoded" asp-controller="API" asp-action="SetEndYear" onsubmit="YEARSUBMIT(this);return false;">
Verfügbare Jahre: bis&nbsp;
<select name="EndYear" id="">
@foreach (var y in Model.AvailableYears) {
<option>@y</option>
}
</select>
<button id="ha-setendyearbutton" class="ha-setendyearbutton" type="submit">Setzen</button>
</form>
}
</div>

View File

@@ -0,0 +1,83 @@
@model XMLStateViewModel;
@{
ViewData["Title"] = "Syntax-Check & Dateien";
ViewData["SEODescription"] = "Johann Georg Hamann: Kommentierte Briefausgabe, Hg. v. Leonard Keidel und Janina Reibold. Durchsuchbare Online-Ausgabe der Briefe von und an Johann Georg Hamann.";
ViewData["showCredits"] = "false";
}
<div class="ha-adminuploadfields" id="ha-adminuploadfields">
<div class="ha-uploadcontainer">
@* File Category Page File List *@
@*
<textarea class="py-2 px-3 mx-8 mb-8 shadow-lg font-mono text-sm border rounded" id="errormessagebox" name="errormessagebox" rows="25" cols="90" readonly>
@foreach (var f in Model.Files[Model.Prefix])
{
@f.Messages
}
</textarea> *@
@if (Model.ManagedFiles != null && Model.ManagedFiles.Any()) {
<div class="">
<table>
@foreach (var f in Model.ManagedFiles) {
<tr>
<td>@f.FileName</td>
<td>@f.GetLastModified()</td>
@if (f.IsValid) {
<td>Valid! @f.GetLog()</td>
} else {
<td>@f.GetLog()</td>
}
</tr>
}
</table>
</div>
} else {
<div class="">
Keine Dateien im Repository gefunden!
</div>
}
<script defer src="/js/filelistform.js" asp-append-version="true"></script>
<fieldset class="ha-filelistfieldset">
<legend class="ha-filelistlegend">Verfügbare Dateien</legend>
@if(Model.HamannFiles != null && Model.HamannFiles.Any()) {
<form class="ha-selectfilesform" id="selecthamannfilesform" asp-controller="API" asp-action="SetInProduction" method="post" onsubmit="USESubmit(this);return false;" enctype="multipart/form-data">
<div class="ha-filelistlist">
@foreach (var file in Model.HamannFiles) {
<div class="ha-filelistfile">
@if (Model.ActiveFile != null) {
<input type="radio" id="@file.Name" name="file" value="@file.Name" @(file.Name == @Model.ActiveFile!.Name ? "checked='checked'" : "")>
} else {
<input type="radio" id="@file.Name" name="file" value="@file.Name">
}
<div class="ha-filelistname">@file.Name</div>
@if (Model.ActiveFile != null && file.Name == Model.ActiveFile!.Name) {
<div class="ha-filelistusedproduction">
<div class="ha-filelistproduction">in Verwendung</div>
</div>
}
@* // TODO Metadata
// TODO DELETE *@
<div class="ha-filelistmodified">@file.LastModified.LocalDateTime</div>
</div>
}
</div>
@if (!Model.ValidState) {
<div>
Status nicht validiert! Daten können nicht auf der Webseite angezeigt werden!
</div>
}
<output id ="ha-filelistoutput"></output>
<button type="submit" class="ha-filelistbutton" id="ha-filelistbutton" >
Laden
<div class="ha-lds-ellipsis-load" id="ha-lds-ellipsis-load"></div>
</button>
</form>
}
else {
<div>Keine Dateien gefunden! Es wird eine fallback-Datei verwendet!</div>
}
</fieldset>

View File

@@ -1,53 +0,0 @@
@model (List<FileModel>? files, string title, string aspcontrolller, string aspaction, string id, string downloadprefix, bool multipleallowed);
<script defer src="/js/filelistform.js" asp-append-version="true"></script>
<fieldset class="ha-filelistfieldset">
<legend class="ha-filelistlegend">@Model.title</legend>
@if(Model.files != null && Model.files.Any()) {
<form class="ha-selectfilesform" id="selecthamannfilesform" asp-controller="@Model.aspcontrolller" asp-action="@Model.aspaction" asp-route-id="@Model.id" method="post" onsubmit="USESubmit(this);return false;" enctype="multipart/form-data">
<div class="ha-filelistlist">
@foreach (var file in Model.files.OrderByDescending(x => x.LastModified)) {
<div class="ha-filelistfile">
@if (Model.multipleallowed) {
<input type="checkbox" id="@file.FileName" name="file" value="@file.FileName" @(file.IsUsed ? "checked='checked'" : "")>
}
else {
<input type="radio" id="@file.FileName" name="file" value="@file.FileName" @(file.InProduction ? "checked='checked'" : "")>
}
<div class="ha-filelistname">@file.FileName</div>
@if (file.InProduction || file.IsUsed) {
<div class="ha-filelistusedproduction">
@if (file.InProduction) {
<div class="ha-filelistproduction">in Verwendung</div>
}
@if (file.IsUsed) {
<div class="ha-filelistused">geladen</div>
}
</div>
}
@if (file.Fields != null && file.Fields.Any()) {
<div class="ha-filelistfields">
@foreach (var field in file.Fields) {
@if (field.Item2 != null) {
<div class="ha-filelistfield">field.Item2</div>
}
}
</div>
}
<div class="ha-filelistmodified">@file.LastModified - @file.Prefix</div>
</div>
}
</div>
<output id ="ha-filelistoutput"></output>
<button type="submit" class="ha-filelistbutton" id="ha-filelistbutton" >
Laden
<div class="ha-lds-ellipsis-load" id="ha-lds-ellipsis-load"></div>
</button>
</form>
}
else {
<div>Keine Dateien gefunden! Es wird eine fallback-Datei verwendet!</div>
}
</fieldset>

View File

@@ -1,10 +1,10 @@
@model UploadViewModel;
@model XMLStateViewModel;
<script defer src="/js/publishform.js" asp-append-version="true"></script>
@if (Model.UsedFiles != null && Model.UsedFiles.Any()) {
@* @if (Model. != null && Model..Any()) {
<div class="ha-publishfilelisttitle">Aktuell geladene Dateien</div>
<table class="ha-publishfilelistlist">
@foreach (var (category, files) in Model.UsedFiles.OrderBy(x => x.Key))
@foreach (var (category, files) in Model..OrderBy(x => x.Key))
{
<tr>
<td>@Model.AvailableRoots.Where(x => x.Prefix == category).First().Type:</td>
@@ -32,4 +32,4 @@
<output form="uploadForm" name="publish-result" id="publish-result"></output>
</div>
</form>
}
} *@

View File

@@ -1,4 +1,4 @@
@model UploadViewModel;
@model XMLStateViewModel;
<script defer src="/js/uploadform.js" asp-append-version="true"></script>

View File

@@ -5,21 +5,17 @@ using HaWeb.Models;
using HaDocument.Interfaces;
using HaDocument.Models;
using HaXMLReader.Interfaces;
using Microsoft.Extensions.FileProviders;
public interface IXMLService {
public interface IXMLInteractionService {
public XElement? TryCreate();
public bool GetValidState();
public void Collect(List<IFileInfo> Files);
public Dictionary<string, FileList?>? GetLoaded();
public IXMLRoot? GetRoot(string name);
public List<IXMLRoot>? GetRootsList();
public Dictionary<string, IXMLRoot>? GetRootsDictionary();
public List<XMLRootDocument>? ProbeFile(XDocument document, ModelStateDictionary ModelState);
public Dictionary<string, FileList?>? GetUsedDictionary();
public XElement? MergeUsedDocuments(ModelStateDictionary ModelState);
public void Use(XMLRootDocument doc);
public void AutoUse(string prefix);
public void AutoUse(FileList filelist);
public Dictionary<string, FileList?>? GetInProduction();
public void UnUse(string prefix);
public void SetInProduction();
public void SetInProduction(XDocument document);
public void CreateSearchables(XDocument document);
public List<FileModel>? GetManagedFiles();
public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? SearchCollection(string collection, string searchword, IReaderService reader, ILibrary? lib);
public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? GetPreviews(List<(string, List<Marginal>)> places, IReaderService reader, ILibrary lib);
}

View File

@@ -58,8 +58,5 @@ public interface IXMLRoot {
// });
// return ret;
// }
public abstract XElement CreateHamannDocument(XElement element);
public abstract void MergeIntoFile(XElement file, XMLRootDocument document);
}

View File

@@ -1,123 +1,169 @@
namespace HaWeb.XMLParser;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text;
using System.Xml;
using System.Xml.Linq;
using System.Xml.XPath;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using HaWeb.Models;
using HaWeb.SearchHelpers;
using System.Collections.Concurrent;
using System.Threading.Tasks;
using System.Text;
using HaXMLReader.Interfaces;
using HaDocument.Interfaces;
using HaDocument.Models;
using HaWeb.FileHelpers;
using HaWeb.Models;
using HaWeb.SearchHelpers;
using HaWeb.XMLParser;
using HaWeb.XMLTests;
using HaXMLReader.Interfaces;
using Microsoft.Extensions.FileProviders;
// XMLService provides a wrapper around the loaded and used XML data
public class XMLService : IXMLService {
private Dictionary<string, FileList?>? _Used;
private Dictionary<string, IXMLRoot>? _Roots;
private Dictionary<string, IXMLCollection>? _Collections;
// Conditions for Successful create
// All types there
// Merging Success
// Saving Success
// Loading Success
private Stack<Dictionary<string, FileList?>>? _InProduction;
// Startup (BEFORE IXMLFileProvider, After IHaDocumentWrapper)
public class XMLInteractionService : IXMLInteractionService {
private readonly IXMLTestService _testService;
private readonly long _fileSizeLimit;
private readonly string[] _allowedExtensions = { ".xml" };
private readonly static XmlReaderSettings _xmlSettings = new XmlReaderSettings() {
CloseInput = true,
CheckCharacters = false,
ConformanceLevel = ConformanceLevel.Fragment,
IgnoreComments = true,
IgnoreProcessingInstructions = true,
IgnoreWhitespace = false
};
private Dictionary<string, ItemsCollection>? _collectedProduction;
private Dictionary<string, ItemsCollection>? _collectedUsed;
private Dictionary<string, IXMLRoot>? _RootDefs;
private Dictionary<string, IXMLCollection>? _CollectionDefs;
public XMLService() {
// Getting all classes which implement IXMLRoot for possible document endpoints
private List<FileModel>? _ManagedFiles;
private Dictionary<string, FileList?>? _Loaded;
private Dictionary<string, ItemsCollection>? _Collection;
private bool _ValidState = false;
public XMLInteractionService(IConfiguration config, IXMLTestService testService) {
_testService = testService;
_fileSizeLimit = config.GetValue<long>("FileSizeLimit");
var roottypes = _GetAllTypesThatImplementInterface<IXMLRoot>().ToList();
roottypes.ForEach( x => {
if (this._Roots == null) this._Roots = new Dictionary<string, IXMLRoot>();
if (this._RootDefs == null) this._RootDefs = new Dictionary<string, IXMLRoot>();
var instance = (IXMLRoot)Activator.CreateInstance(x)!;
if (instance != null) this._Roots.Add(instance.Prefix, instance);
if (instance != null) this._RootDefs.Add(instance.Prefix, instance);
});
var collectiontypes = _GetAllTypesThatImplementInterface<IXMLCollection>().ToList();
collectiontypes.ForEach( x => {
if (this._Collections == null) this._Collections = new Dictionary<string, IXMLCollection>();
if (this._CollectionDefs == null) this._CollectionDefs = new Dictionary<string, IXMLCollection>();
var instance = (IXMLCollection)Activator.CreateInstance(x)!;
if (instance != null && instance.IsGlobal()) this._Collections.Add(instance.Key, instance);
if (instance != null && instance.IsGlobal()) this._CollectionDefs.Add(instance.Key, instance);
});
if (_Roots == null || !_Roots.Any())
if (_RootDefs == null || !_RootDefs.Any())
throw new Exception("No classes for upload endpoints were found!");
if (_Collections == null || !_Collections.Any())
if (_CollectionDefs == null || !_CollectionDefs.Any())
throw new Exception("No classes for object collection were found!");
}
// Getters and Setters
public Dictionary<string, FileList?>? GetLoaded() => this._Loaded;
public List<FileModel>? GetManagedFiles() => this._ManagedFiles;
public List<IXMLRoot>? GetRootsList() => this._RootDefs == null ? null : this._RootDefs.Values.ToList();
public bool GetValidState() => this._ValidState;
public IXMLRoot? GetRoot(string name) {
if (_Roots == null) return null;
_Roots.TryGetValue(name, out var root);
if (_RootDefs == null) return null;
_RootDefs.TryGetValue(name, out var root);
return root;
}
public List<IXMLRoot>? GetRootsList() => this._Roots == null ? null : this._Roots.Values.ToList();
public Dictionary<string, IXMLRoot>? GetRootsDictionary() => this._Roots == null ? null : this._Roots;
public Dictionary<string, FileList?>? GetInProduction() {
if (_InProduction == null) return null;
return this._InProduction.Peek();
}
public void SetInProduction() {
if (_Used == null) return;
var inProduction = new Dictionary<string, FileList?>();
foreach (var category in _Used) {
if (category.Value == null || category.Value.GetFileList() == null || !category.Value.GetFileList()!.Any())
return;
inProduction.Add(category.Key, category.Value);
}
if(_InProduction == null) _InProduction = new Stack<Dictionary<string, FileList?>>();
_InProduction.Push(inProduction);
}
public void SetInProduction(XDocument document) {
if (document == null || _Roots == null) return;
int numProcs = Environment.ProcessorCount;
int concurrencyLevel = numProcs * 2;
int startingSize = 2909;
int startingSizeAllCollections = 23;
var ret = new ConcurrentDictionary<string, ItemsCollection>(concurrencyLevel, startingSizeAllCollections);
if (_Collections != null)
Parallel.ForEach(_Collections, (coll) => {
var elem = coll.Value.xPath.Aggregate(new List<XElement>(), (x, y) => { x.AddRange(document.XPathSelectElements(y).ToList()); return x; } );
if (elem != null && elem.Any()) {
var items = new ConcurrentDictionary<string, CollectedItem>(concurrencyLevel, startingSize);
foreach (var e in elem) {
var k = coll.Value.GenerateKey(e);
if (k != null) {
var searchtext = coll.Value.Searchable ?
StringHelpers.NormalizeWhiteSpace(e.ToString(), ' ', false) :
null;
var datafileds = coll.Value.GenerateDataFields != null ?
coll.Value.GenerateDataFields(e) :
null;
items[k] = new CollectedItem(k, e, coll.Value, datafileds, searchtext);
}
}
if (items.Any()) {
if (!ret.ContainsKey(coll.Key))
ret[coll.Key] = new ItemsCollection(coll.Key, coll.Value);
foreach (var item in items)
ret[coll.Key].Items.Add(item.Key, item.Value);
// Functions
public void Collect(List<IFileInfo> files) {
if (files == null || !files.Any()) return;
_ValidState = true;
List<FileModel> res = new List<FileModel>();
foreach (var f in files) {
var sb = new StringBuilder();
var m = _CreateFileModel(f, null);
res.Add(m);
// 1. Open File for Reading
try {
using (Stream file = f.CreateReadStream()) {
// 2. Some security checks, if file empty, wrong start, wrong extension, too big
if (!XMLFileHelpers.ProcessFile(file, f.Name, sb, _allowedExtensions, _fileSizeLimit)) {
m.Log(sb.ToString());
continue;
}
}
});
} catch {
m.Log( "Datei konnte nicht geöffnet werden.");
continue;
}
if (ret.Any()) {
Parallel.ForEach(ret, (collection) => {
collection.Value.GenerateGroupings();
});
// 3. Check validity of XML
try {
using (var xmlreader = XmlReader.Create(f.CreateReadStream(), _xmlSettings)) {
var doc = XDocument.Load(xmlreader, LoadOptions.PreserveWhitespace | LoadOptions.SetLineInfo);
// 4. Check if opus-Document
// TODO: Unter der HOOD werden in ProbeFiles noch eigene Files gebaut!
var docs = _ProbeFile(doc, m);
if (docs == null || !docs.Any()) continue;
// Success! File can be recognized and parsed.
m.Validate();
foreach (var d in docs) {
if (_Loaded == null) _Loaded = new Dictionary<string, FileList?>();
if (!_Loaded.ContainsKey(d.Prefix)) _Loaded.Add(d.Prefix, new FileList(d.XMLRoot));
_Loaded[d.Prefix]!.Add(d);
}
}
} catch (Exception ex) {
m.Log($"Ungültiges XML: {ex.Message}");
continue;
}
}
_collectedProduction = ret.ToDictionary(x => x.Key, y => y.Value);
if(res.Any()) this._ManagedFiles = res;
// Set validity
foreach (var f in _ManagedFiles) {
if (!f.IsValid) _ValidState = false;
break;
}
// TODO: Speed up this:
var sw = new Stopwatch();
sw.Start();
_testService.Test(this);
sw.Stop();
Console.WriteLine("Syntaxcheck " + sw.ElapsedMilliseconds.ToString() + " ms");
}
public XElement? TryCreate() {
if (_Loaded == null || !_Loaded.Any() || _RootDefs == null || !_RootDefs.Any() || !_ValidState) return null;
var opus = new XElement("opus");
// TODO: Workaround for bug in HaDocument: roots have to be added in a specific order
var used = _Loaded.OrderByDescending(x => x.Key);
foreach (var category in used) {
if (category.Value == null || category.Value.GetFileList() == null || !category.Value.GetFileList()!.Any()) {
return null;
}
var documents = category.Value.GetFileList();
foreach (var document in documents!) {
document.XMLRoot.MergeIntoFile(opus, document);
}
}
return opus;
}
public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? GetPreviews(List<(string, List<Marginal>)> places, IReaderService reader, ILibrary lib) {
var searchableObjects = _collectedProduction["letters"].Items;
if (!_Collection.ContainsKey("letters")) return null;
var searchableObjects = _Collection["letters"].Items;
var res = new ConcurrentBag<(string Index, List<(string Page, string Line, string preview, string identifier)> Results)>();
Parallel.ForEach(places, (obj) => {
@@ -148,8 +194,8 @@ public class XMLService : IXMLService {
}
public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? SearchCollection(string collection, string searchword, IReaderService reader, ILibrary lib) {
if (!_collectedProduction.ContainsKey(collection)) return null;
var searchableObjects = _collectedProduction[collection].Items;
if (!_Collection.ContainsKey(collection)) return null;
var searchableObjects = _Collection[collection].Items;
var res = new ConcurrentBag<(string Index, List<(string Page, string Line, string preview, string identifier)> Results)>();
var sw = StringHelpers.NormalizeWhiteSpace(searchword.Trim());
@@ -202,110 +248,88 @@ public class XMLService : IXMLService {
return res.ToList();
}
public List<XMLRootDocument>? ProbeFile(XDocument document, ModelStateDictionary ModelState) {
if (document.Root!.Name != "opus") {
ModelState.AddModelError("Error", "A valid Hamann-Docuemnt must begin with <opus>");
return null;
}
public void CreateSearchables(XDocument document) {
if (document == null || _RootDefs == null) return;
int numProcs = Environment.ProcessorCount;
int concurrencyLevel = numProcs * 2;
int startingSize = 2909;
int startingSizeAllCollections = 23;
var ret = new ConcurrentDictionary<string, ItemsCollection>(concurrencyLevel, startingSizeAllCollections);
List<XMLRootDocument>? res = null;
if (document.Root != null && _Roots != null) {
foreach (var (_, root) in _Roots) {
var elements = root.IsTypeOf(document.Root);
if (elements != null && elements.Any())
foreach (var elem in elements) {
if (res == null) res = new List<XMLRootDocument>();
res.Add(_createXMLRootDocument(root, elem));
if (_CollectionDefs != null)
Parallel.ForEach(_CollectionDefs, (coll) => {
var elem = coll.Value.xPath.Aggregate(new List<XElement>(), (x, y) => { x.AddRange(document.XPathSelectElements(y).ToList()); return x; } );
if (elem != null && elem.Any()) {
var items = new ConcurrentDictionary<string, CollectedItem>(concurrencyLevel, startingSize);
foreach (var e in elem) {
var k = coll.Value.GenerateKey(e);
if (k != null) {
var searchtext = coll.Value.Searchable ?
StringHelpers.NormalizeWhiteSpace(e.ToString(), ' ', false) :
null;
var datafileds = coll.Value.GenerateDataFields != null ?
coll.Value.GenerateDataFields(e) :
null;
items[k] = new CollectedItem(k, e, coll.Value, datafileds, searchtext);
}
}
if (items.Any()) {
if (!ret.ContainsKey(coll.Key))
ret[coll.Key] = new ItemsCollection(coll.Key, coll.Value);
foreach (var item in items)
ret[coll.Key].Items.Add(item.Key, item.Value);
}
}
}
if (res == null) ModelState.AddModelError("Error", "Kein zum Hamann-Briefe-Projekt passendes XML gefunden.");
return res;
}
// Returns the dictionary of file lists currently marked as "used",
// keyed by document prefix; null when nothing has been put into use yet.
public Dictionary<string, FileList?>? GetUsedDictionary()
    => this._Used;
// Adds a document and sets it to used
// Registers a document as "used": lazily creates the per-prefix file list,
// appends the document, and forces its XML element to be loaded into memory.
public void Use(XMLRootDocument doc) {
    _Used ??= new Dictionary<string, FileList?>();
    var prefix = doc.Prefix;
    if (!_Used.ContainsKey(prefix))
        _Used.Add(prefix, new FileList(doc.XMLRoot));
    _Used[prefix]!.Add(doc);
    // Discarded on purpose: GetElement() is called only for its loading side effect.
    _ = doc.GetElement();
}
public void UnUse(string prefix) {
if (_Used != null && _Used.ContainsKey(prefix)) {
// Unload the Elements so unused files don't use up the memory.
if (_Used[prefix]!.GetFileList() != null) {
foreach (var e in _Used[prefix]!.GetFileList()) {
e.UnUse();
}
}
_Used.Remove(prefix);
});
if (ret.Any()) {
Parallel.ForEach(ret, (collection) => {
collection.Value.GenerateGroupings();
});
}
return;
_Collection = ret.ToDictionary(x => x.Key, y => y.Value);
}
// Performs detection of using on the specified document type
// Runs auto-selection (see AutoUse(FileList)) over the files currently
// registered under the given prefix; no-op when the prefix is unknown.
public void AutoUse(string prefix) {
    if (_Used == null || !_Used.ContainsKey(prefix)) return;
    AutoUse(_Used[prefix]!);
}
// Performs detection of using given a list of files
// Auto-selects which files to "use" from a list: files are grouped by their
// identification string and, per group, only the most recent file (by Date)
// is put into use via Use(). Any previous selection for this prefix is discarded.
public void AutoUse(FileList filelist) {
    var list = filelist.GetFileList();
    if (list == null) return;
    var prefix = filelist.XMLRoot.Prefix;
    // Start from a clean slate for this prefix; Use() re-creates the entry.
    _Used?.Remove(prefix);
    // TODO: Item1 — clarify whether Item1 or Item2 of IdentificationString is
    // the intended grouping key.
    var lookup = list.ToLookup(x => x.IdentificationString.Item2);
    foreach (var group in lookup) {
        // OrderBy is stable, so Last() picks the latest file (ties: last occurrence).
        Use(group.OrderBy(x => x.Date).Last());
    }
}
// Merges all currently "used" documents into a single <opus> root element for
// publishing. Returns null — and records a model error — when nothing is
// selected or any document category contributes no files.
public XElement? MergeUsedDocuments(ModelStateDictionary ModelState) {
    if (_Used == null || _Roots == null) {
        ModelState.AddModelError("Error", "Keine Dokumente ausgewählt");
        return null;
    }
    var opus = new XElement("opus");
    // TODO: Workaround for bug in HaDocument: roots have to be added in a specific order
    var used = _Used.OrderByDescending(x => x.Key);
    foreach (var category in used) {
        // Every category must contribute at least one document, otherwise abort.
        if (category.Value == null || category.Value.GetFileList() == null || !category.Value.GetFileList()!.Any()) {
            ModelState.AddModelError("Error", _Roots![category.Key].Type + " nicht vorhanden.");
            return null;
        }
        var documents = category.Value.GetFileList();
        foreach (var document in documents!) {
            document.XMLRoot.MergeIntoFile(opus, document);
        }
    }
    return opus;
}
// Wraps an extracted root element into an XMLRootDocument and lets the root
// definition compute the document's metadata fields.
private XMLRootDocument _createXMLRootDocument(IXMLRoot Root, XElement element) {
    var doc = new XMLRootDocument(Root, Root.Prefix, Root.GenerateIdentificationString(element), element);
    doc.Fields = Root.GenerateFields(doc);
    return doc;
}
private IEnumerable<Type> _GetAllTypesThatImplementInterface<T>()
{
private IEnumerable<Type> _GetAllTypesThatImplementInterface<T>() {
return System.Reflection.Assembly.GetExecutingAssembly()
.GetTypes()
.Where(type => typeof(T).IsAssignableFrom(type) && !type.IsInterface);
}
// Probes a parsed XML document for known root types and creates one
// XMLRootDocument per recognized root element. Problems are logged to the
// FileModel; returns null when nothing is recognized.
private List<XMLRootDocument>? _ProbeFile(XDocument document, FileModel file) {
    if (document.Root!.Name != "opus") {
        file.Log("Ein gültiges Dokument muss mit <opus> beginnen.");
        return null;
    }
    List<XMLRootDocument>? found = null;
    if (document.Root != null && _RootDefs != null) {
        foreach (var rootDef in _RootDefs.Values) {
            var elements = rootDef.IsTypeOf(document.Root);
            if (elements == null) continue;
            foreach (var elem in elements) {
                found ??= new List<XMLRootDocument>();
                found.Add(_createXMLRootDocument(rootDef, elem, file));
            }
        }
    }
    if (found == null) file.Log("Dokumenten-Typ nicht erkannt.");
    return found;
}
// Wraps an extracted root element into an XMLRootDocument, remembering the
// originating FileModel, and lets the root definition compute its fields.
private XMLRootDocument _createXMLRootDocument(IXMLRoot Root, XElement element, FileModel file) {
    var doc = new XMLRootDocument(Root, Root.Prefix, Root.GenerateIdentificationString(element), element, file);
    doc.Fields = Root.GenerateFields(doc);
    return doc;
}
// Builds a FileModel for an uploaded file, optionally pre-seeding its log
// with an initial message.
private FileModel _CreateFileModel(IFileInfo file, string? message) {
    var model = new FileModel(file.Name, file);
    if (!string.IsNullOrWhiteSpace(message))
        model.Log(message);
    return model;
}
}

View File

@@ -0,0 +1,138 @@
namespace HaWeb.BackgroundTask;
using System.Threading.Channels;
/// <summary>FIFO queue of asynchronous background work items.</summary>
public interface IBackgroundTaskQueue {
    /// <summary>Enqueues a work item; may await when the queue is at capacity.</summary>
    ValueTask QueueBackgroundWorkItemAsync(Func<CancellationToken, ValueTask> workItem);
    /// <summary>Dequeues the next work item, awaiting until one is available or the token is cancelled.</summary>
    ValueTask<Func<CancellationToken, ValueTask>> DequeueAsync(CancellationToken cancellationToken);
}
/// <summary>Console input monitor that can enqueue background work items.</summary>
public interface IMonitorLoop {
    /// <summary>Begins monitoring console input on a background thread.</summary>
    public void StartMonitorLoop();
}
/// <summary>
/// Bounded, thread-safe FIFO queue for background work items, backed by a
/// System.Threading.Channels channel. Producers wait when the queue is full.
/// </summary>
public class BackgroundTaskQueue : IBackgroundTaskQueue {
    private readonly Channel<Func<CancellationToken, ValueTask>> _queue;

    /// <param name="capacity">Maximum number of queued work items before writers wait.</param>
    public BackgroundTaskQueue(int capacity) {
        // Wait (rather than drop items) when the channel is full, applying
        // backpressure to producers.
        var options = new BoundedChannelOptions(capacity) {
            FullMode = BoundedChannelFullMode.Wait
        };
        _queue = Channel.CreateBounded<Func<CancellationToken, ValueTask>>(options);
    }

    /// <summary>Enqueues a work item; completes once the channel has accepted it.</summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="workItem"/> is null.</exception>
    public async ValueTask QueueBackgroundWorkItemAsync(Func<CancellationToken, ValueTask> workItem) {
        ArgumentNullException.ThrowIfNull(workItem);
        await _queue.Writer.WriteAsync(workItem);
    }

    /// <summary>Dequeues the next work item, awaiting until one is available.</summary>
    public async ValueTask<Func<CancellationToken, ValueTask>> DequeueAsync(
        CancellationToken cancellationToken
    ) {
        return await _queue.Reader.ReadAsync(cancellationToken);
    }
}
/// <summary>
/// Hosted service that drains the background task queue and executes work
/// items one at a time until the application shuts down.
/// </summary>
public class QueuedHostedService : BackgroundService {
    private readonly ILogger<QueuedHostedService> _logger;

    public IBackgroundTaskQueue TaskQueue { get; }

    public QueuedHostedService(IBackgroundTaskQueue taskQueue, ILogger<QueuedHostedService> logger) {
        TaskQueue = taskQueue;
        _logger = logger;
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken) {
        await BackgroundProcessing(stoppingToken);
    }

    private async Task BackgroundProcessing(CancellationToken stoppingToken) {
        while (!stoppingToken.IsCancellationRequested) {
            Func<CancellationToken, ValueTask> workItem;
            try {
                workItem = await TaskQueue.DequeueAsync(stoppingToken);
            }
            catch (OperationCanceledException) {
                // Shutdown was requested while waiting for work; leave the loop
                // instead of letting the cancellation surface as an error.
                break;
            }
            try {
                await workItem(stoppingToken);
            }
            catch (Exception ex) {
                // A faulty work item must not bring down the processing loop.
                _logger.LogError(ex, "Error occurred executing {WorkItem}.", nameof(workItem));
            }
        }
    }

    public override async Task StopAsync(CancellationToken stoppingToken) {
        _logger.LogInformation("Queued Hosted Service is stopping.");
        await base.StopAsync(stoppingToken);
    }
}
/// <summary>
/// Console monitor loop: pressing 'W' enqueues a demo work item into the
/// background task queue. Runs until application shutdown.
/// </summary>
public class MonitorLoop : IMonitorLoop {
    private readonly IBackgroundTaskQueue _taskQueue;
    private readonly ILogger _logger;
    private readonly CancellationToken _cancellationToken;

    public MonitorLoop(
        IBackgroundTaskQueue taskQueue,
        ILogger<MonitorLoop> logger,
        IHostApplicationLifetime applicationLifetime
    ) {
        _taskQueue = taskQueue;
        _logger = logger;
        // Stop monitoring as soon as the host begins shutting down.
        _cancellationToken = applicationLifetime.ApplicationStopping;
    }

    public void StartMonitorLoop() {
        _logger.LogInformation("MonitorAsync Loop is starting.");
        // Run a console user input loop in a background thread
        Task.Run(async () => await MonitorAsync());
    }

    private async ValueTask MonitorAsync() {
        while (!_cancellationToken.IsCancellationRequested) {
            var keyStroke = Console.ReadKey();
            if (keyStroke.Key != ConsoleKey.W) continue;
            // Enqueue a background work item
            await _taskQueue.QueueBackgroundWorkItemAsync(BuildWorkItem);
        }
    }

    private async ValueTask BuildWorkItem(CancellationToken token) {
        // Simulate three 5-second tasks to complete
        // for each enqueued work item
        var guid = Guid.NewGuid().ToString();
        _logger.LogInformation("Queued Background Task {Guid} is starting.", guid);
        var delayLoop = 0;
        while (delayLoop < 3 && !token.IsCancellationRequested) {
            try {
                await Task.Delay(TimeSpan.FromSeconds(5), token);
            }
            catch (OperationCanceledException) {
                // Prevent throwing if the Delay is cancelled
            }
            delayLoop++;
            _logger.LogInformation("Queued Background Task {Guid} is running. {DelayLoop}/3", guid, delayLoop);
        }
        if (delayLoop == 3) {
            _logger.LogInformation("Queued Background Task {Guid} is complete.", guid);
        } else {
            _logger.LogInformation("Queued Background Task {Guid} was cancelled.", guid);
        }
    }
}

View File

@@ -0,0 +1,6 @@
namespace HaWeb.XMLTests;
/// <summary>
/// An XPath expression together with the set of documents it applies to.
/// </summary>
public class HamannXPath {
    // Document prefixes this XPath is evaluated against; null means no filter
    // (the XMLTester skips documents whose Prefix is not in this list).
    public string[]? Documents { get; set; }
    // The XPath expression itself.
    public string? XPath { get; set; }
}

View File

@@ -4,8 +4,8 @@ using System.Xml.Linq;
public interface ICollectionRule {
public string Name { get; }
public string[] Bases { get; }
public string[] Backlinks { get; }
public HamannXPath[] Bases { get; }
public HamannXPath[] Backlinks { get; }
public IEnumerable<(string, XElement, XMLRootDocument)> GenerateIdentificationStrings(IEnumerable<(XElement, XMLRootDocument)> List);
public IEnumerable<(string, XElement, XMLRootDocument, bool)> GenerateBacklinkString(IEnumerable<(XElement, XMLRootDocument)> List);
}

View File

@@ -1,3 +1,5 @@
using HaWeb.XMLParser;
namespace HaWeb.XMLTests;
public interface IXMLTestService {
@@ -5,5 +7,5 @@ public interface IXMLTestService {
public Dictionary<string, INodeRule>? Ruleset { get; }
public Dictionary<string, ICollectionRule>? CollectionRuleset { get; }
public void Test();
public void Test(IXMLInteractionService _XMLService);
}

View File

@@ -2,8 +2,8 @@ namespace HaWeb.XMLTests;
public interface INodeRule {
public string Name { get; }
public string XPath { get; }
public HamannXPath XPath { get; }
public string? uniquenessAttribute { get; }
public List<(string LinkAttribute, string RemoteElement, string RemoteAttribute)>? References { get; }
public List<(string LinkAttribute, HamannXPath RemoteElement, string RemoteAttribute)>? References { get; }
public string[]? Attributes { get; }
}

View File

@@ -2,12 +2,9 @@ namespace HaWeb.XMLTests;
using HaWeb.XMLParser;
public class XMLTestService : IXMLTestService {
private IXMLService _XMLService;
public Dictionary<string, INodeRule>? Ruleset { get; private set; }
public Dictionary<string, ICollectionRule>? CollectionRuleset { get; private set; }
public XMLTestService(IXMLService xmlService) {
_XMLService = xmlService;
public XMLTestService() {
var roottypes = _GetAllTypesThatImplementInterface<INodeRule>().ToList();
roottypes.ForEach( x => {
if (this.Ruleset == null) this.Ruleset = new Dictionary<string, INodeRule>();
@@ -23,17 +20,10 @@ public class XMLTestService : IXMLTestService {
});
}
public void Test() {
var docs = _XMLService.GetUsedDictionary();
public void Test(IXMLInteractionService _XMLService) {
var docs = _XMLService.GetLoaded();
if (docs == null) return;
foreach (var d in docs.Values) {
var fl = d.GetFileList();
if (fl == null) continue;
foreach (var v in fl) {
v.ResetLog();
}
}
var tester = new XMLTester(this, _XMLService.GetUsedDictionary());
var tester = new XMLTester(this, docs);
tester.Test();
}

View File

@@ -29,7 +29,7 @@ public class XMLTester {
public void Test() {
if (_Ruleset == null) return;
_IDs = new System.Collections.Generic.Dictionary<string, HashSet<string>>();
_IDs = new Dictionary<string, HashSet<string>>();
foreach (var rule in _Ruleset) {
buildIDs(rule.Value);
checkRequiredAttributes(rule.Value);
@@ -42,6 +42,7 @@ public class XMLTester {
checkReferences(collectionrule.Value);
}
}
private void checkReferences(INodeRule rule) {
if (rule.References == null || !rule.References.Any()) return;
var elements = GetEvaluateXPath(rule.XPath);
@@ -49,11 +50,11 @@ public class XMLTester {
foreach (var e in elements) {
foreach (var r in rule.References) {
var hasattr = checkAttribute(e.Item1, r.LinkAttribute, e.Item2, false);
var keyname = r.RemoteElement + "-" + r.RemoteAttribute;
var keyname = r.RemoteElement.XPath + "-" + r.RemoteAttribute;
if (_IDs != null && _IDs.ContainsKey(keyname) && hasattr) {
var val = e.Item1.Attribute(r.LinkAttribute)!.Value;
if (!_IDs[keyname].Contains(val)) {
e.Item2.Log(generateLogMessage(e.Item1) + "Verlinktes Element " + val + " nicht gefunden.");
e.Item2.File.Log(generateLogMessage(e.Item1) + "Verlinktes Element " + val + " nicht gefunden.");
}
}
}
@@ -68,7 +69,7 @@ public class XMLTester {
if (elemens != null && elemens.Any()) {
foreach(var r in rule.GenerateBacklinkString(elemens)) {
if (!r.Item4 && !_CollectionIDs[rule.Name].Contains(r.Item1)) {
r.Item3.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden.");
r.Item3.File.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden.");
}
if (r.Item4) {
var coll = _CollectionIDs[rule.Name];
@@ -76,7 +77,7 @@ public class XMLTester {
var searchterm = items[0];
var found = coll.Where(x => x.StartsWith(searchterm));
if (items[0] == "NA" || found == null || !found.Any()) {
r.Item3.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden.");
r.Item3.File.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden.");
} else {
for (var i = 1; i < items.Length; i++) {
if (items[i] == "NA") break;
@@ -84,7 +85,7 @@ public class XMLTester {
searchterm = searchterm + "-" + items[i];
found = found.Where(x => x.StartsWith(searchterm));
if (found == null || !found.Any())
r.Item3.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden.");
r.Item3.File.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden.");
}
}
}
@@ -125,7 +126,7 @@ public class XMLTester {
if (elemens != null && elemens.Any()) {
foreach (var r in rule.GenerateIdentificationStrings(elemens)) {
if (!hs.Add(r.Item1)) {
r.Item3.Log(generateLogMessage(r.Item2) + "Brief-Seite-Zeile " + r.Item1 + " mehrdeutig.");
r.Item3.File.Log(generateLogMessage(r.Item2) + "Brief-Seite-Zeile " + r.Item1 + " mehrdeutig.");
}
}
}
@@ -134,24 +135,24 @@ public class XMLTester {
}
}
private void checkUniqueness(string xpathelement, string attribute) {
if (_Documents == null || _IDs == null || _IDs.ContainsKey(xpathelement + "-" + attribute)) return;
private void checkUniqueness(HamannXPath xpathelement, string attribute) {
if (_Documents == null || _IDs == null || _IDs.ContainsKey(xpathelement.XPath + "-" + attribute)) return;
var hs = new HashSet<string>();
var elements = GetEvaluateXPath(xpathelement);
if (elements != null)
foreach (var e in elements) {
if (checkAttribute(e.Item1, attribute, e.Item2)) {
if (!hs.Add(e.Item1.Attribute(attribute)!.Value)) {
e.Item2.Log(generateLogMessage(e.Item1) + "Attributwert " + e.Item1.Attribute(attribute)!.Value + " doppelt.");
e.Item2.File.Log(generateLogMessage(e.Item1) + "Attributwert " + e.Item1.Attribute(attribute)!.Value + " doppelt.");
}
}
}
_IDs.TryAdd(xpathelement + "-" + attribute, hs);
_IDs.TryAdd(xpathelement.XPath + "-" + attribute, hs);
}
private bool checkAttribute(XElement element, string attributename, XMLRootDocument doc, bool log = true) {
if (!element.HasAttributes || element.Attribute(attributename) == null) {
if (log) doc.Log(generateLogMessage(element) + "Attribut " + attributename + " fehlt.");
if (log) doc.File.Log(generateLogMessage(element) + "Attribut " + attributename + " fehlt.");
return false;
}
return true;
@@ -165,19 +166,21 @@ public class XMLTester {
": ";
}
private List<(XElement, XMLRootDocument)>? GetEvaluateXPath(string xpath) {
if (_XPathEvaluated.ContainsKey(xpath)) return _XPathEvaluated[xpath];
if (!_XPathEvaluated.ContainsKey(xpath)) _XPathEvaluated.Add(xpath, null);
if (_Documents == null) return null;
// Cache for XPATH evaluation
private List<(XElement, XMLRootDocument)>? GetEvaluateXPath(HamannXPath xpath) {
if (_Documents == null || xpath == null) return null;
if (_XPathEvaluated.ContainsKey(xpath.XPath)) return _XPathEvaluated[xpath.XPath];
if (!_XPathEvaluated.ContainsKey(xpath.XPath)) _XPathEvaluated.Add(xpath.XPath, null);
foreach (var d in _Documents) {
var elements = d.GetElement().XPathSelectElements(xpath).ToList();
if (xpath.Documents != null && !xpath.Documents.Contains(d.Prefix)) continue;
var elements = d.Element.XPathSelectElements("." + xpath.XPath).ToList();
if (elements != null && elements.Any()) {
if (_XPathEvaluated[xpath] == null) _XPathEvaluated[xpath] = new List<(XElement, XMLRootDocument)>();
if (_XPathEvaluated[xpath.XPath] == null) _XPathEvaluated[xpath.XPath] = new List<(XElement, XMLRootDocument)>();
foreach (var res in elements) {
_XPathEvaluated[xpath]!.Add((res, d));
_XPathEvaluated[xpath.XPath]!.Add((res, d));
}
}
}
return _XPathEvaluated[xpath];
return _XPathEvaluated[xpath.XPath];
}
}

View File

@@ -7,14 +7,16 @@
},
"FeatureManagement": {
"AdminService": true,
"UploadService": true,
"LocalPublishService": true,
"RemotePublishService": false,
"RemotePublishSourceService": false
"LocalPublishService": true
},
"AllowedHosts": "*",
"StoredFilePathLinux": "/home/simon/Downloads/test/",
"StoredFilePathWindows": "C:/Users/simon/Downloads/test/",
"HamannFileStoreLinux": "/home/simon/Downloads/test/",
"HamannFileStoreWindows": "C:/Users/simon/Downloads/test/",
"BareRepositoryPathLinux": "/home/simon/Downloads/test/",
"BareRepositoryPathWindows": "C:/Users/simon/source/hamann-xml/.git/",
"WorkingTreePathLinux": "/home/simon/Downloads/test/",
"WorkingTreePathWindows": "C:/Users/simon/source/hamann-xml/",
"RepositoryBranch": "main",
"StoredPDFPathWindows": "",
"StoredPDFPathLinux": "",
"FileSizeLimit": 52428800,

View File

@@ -1,16 +0,0 @@
{
"FormattingOptions": {
"NewLinesForBracesInLambdaExpressionBody": false,
"NewLinesForBracesInAnonymousMethods": false,
"NewLinesForBracesInAnonymousTypes": false,
"NewLinesForBracesInControlBlocks": false,
"NewLinesForBracesInTypes": false,
"NewLinesForBracesInMethods": false,
"NewLinesForBracesInProperties": false,
"NewLinesForBracesInObjectCollectionArrayInitializers": false,
"NewLinesForBracesInAccessors": false,
"NewLineForElse": false,
"NewLineForCatch": false,
"NewLineForFinally": false
}
}

View File

@@ -1,3 +1,9 @@
// Reads a cookie value by name from document.cookie; undefined when absent.
function getCookie(name) {
    const haystack = "; " + document.cookie;
    const segments = haystack.split("; " + name + "=");
    if (segments.length !== 2) return undefined;
    return segments[1].split(";")[0];
}
const USESubmit = async function (oFormElement, file = null) {
let fd = new FormData(oFormElement);
document.getElementById("ha-filelistbutton").style.pointerEvents = "none";

View File

@@ -1,22 +0,0 @@
// Toggles visibility of the available-files list by swapping the
// 'hidden'/'block' utility classes.
const hideshowfiles = function () {
    const list = document.getElementById("ha-availablefileslist");
    if (list.classList.contains('hidden')) {
        list.classList.remove('hidden');
        list.classList.add('block');
    } else {
        list.classList.add('hidden');
        list.classList.remove('block');
    }
}

// Reads a cookie value by name from document.cookie; undefined when absent.
function getCookie(name) {
    const haystack = "; " + document.cookie;
    const segments = haystack.split("; " + name + "=");
    if (segments.length !== 2) return undefined;
    return segments[1].split(";")[0];
}

// Wire the toggle to the files button, if present on the page.
var filesbutton = document.getElementById("ha-availablefiles");
if (filesbutton !== null) {
    filesbutton.addEventListener("click", () => hideshowfiles());
}