diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..7dc108c --- /dev/null +++ b/.editorconfig @@ -0,0 +1,2 @@ +[*.cs] +csharp_new_line_before_open_brace = none \ No newline at end of file diff --git a/HaDocumentV6/Document.cs b/HaDocumentV6/Document.cs index 1774b2d..9136b59 100644 --- a/HaDocumentV6/Document.cs +++ b/HaDocumentV6/Document.cs @@ -4,6 +4,7 @@ using HaDocument.Logic; using HaDocument.Reactors; using HaXMLReader.Interfaces; using HaXMLReader; +using System.Xml.Linq; namespace HaDocument { @@ -32,6 +33,18 @@ namespace HaDocument return GetLibrary(); } + public static ILibrary Create(IHaDocumentOptions Settings, XElement root) { + _lib = new IntermediateLibrary(); + SettingsValidator.Validate(Settings); + _settings = Settings; + _createReader(root); + _createReactors(); + _reader.Read(); + _library = _createLibrary(); + _reader.Dispose(); + return GetLibrary(); + } + private static void _createReactors() { new EditreasonReactor(_reader, _lib, _settings.NormalizeWhitespace); new HandDefsReactor(_reader, _lib); @@ -49,6 +62,10 @@ namespace HaDocument _reader = new FileReader(_settings.HamannXMLFilePath); } + private static void _createReader(XElement root) { + _reader = new XElementReader(root); + } + private static ILibrary _createLibrary() => _lib.GetLibrary(_settings); diff --git a/HaWeb/.editorconfig b/HaWeb/.editorconfig new file mode 100644 index 0000000..7dc108c --- /dev/null +++ b/HaWeb/.editorconfig @@ -0,0 +1,2 @@ +[*.cs] +csharp_new_line_before_open_brace = none \ No newline at end of file diff --git a/HaWeb/Controllers/APIController.cs b/HaWeb/Controllers/APIController.cs index e6736b5..31b0b97 100644 --- a/HaWeb/Controllers/APIController.cs +++ b/HaWeb/Controllers/APIController.cs @@ -20,265 +20,27 @@ using Microsoft.AspNetCore.Http.Features; using System.Text; // Controlling all the API-Endpoints +[FeatureGate(Features.AdminService)] public class APIController : Controller { // DI private IHaDocumentWrappper _lib; 
- private IReaderService _readerService; - private readonly long _fileSizeLimit; - private readonly string _targetFilePath; - private readonly IXMLService _xmlService; - private readonly IXMLProvider _xmlProvider; - private readonly IXMLTestService _testService; + private readonly IXMLFileProvider _xmlProvider; // Options - private static readonly string[] _permittedExtensions = { ".xml" }; private static readonly FormOptions _defaultFormOptions = new FormOptions(); - public APIController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService xmlService, IXMLProvider xmlProvider, IXMLTestService testService, IConfiguration config) { + public APIController(IHaDocumentWrappper lib, IXMLFileProvider xmlProvider) { _lib = lib; _xmlProvider = xmlProvider; - _readerService = readerService; - _xmlService = xmlService; - _testService = testService; - _fileSizeLimit = config.GetValue("FileSizeLimit"); - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { - _targetFilePath = config.GetValue("StoredFilePathWindows"); - } else { - _targetFilePath = config.GetValue("StoredFilePathLinux"); - } } - [HttpGet] - [Route("API/Syntaxcheck/{id}")] - [DisableFormValueModelBinding] - [ValidateAntiForgeryToken] - [FeatureGate(Features.UploadService, Features.AdminService)] - public IActionResult SyntaxCheck(string id) { - return Ok(); - } - - //// UPLOAD //// - [HttpPost] - [Route("API/Upload")] - [DisableFormValueModelBinding] - [ValidateAntiForgeryToken] - [FeatureGate(Features.UploadService, Features.AdminService)] - public async Task Upload() { - List? docs = null; - //// 1. 
Stage: Check Request format and request spec - // Checks the Content-Type Field (must be multipart + Boundary) - if (!MultipartRequestHelper.IsMultipartContentType(Request.ContentType)) { - ModelState.AddModelError("Error", $"Wrong / No Content Type on the Request"); - return BadRequest(ModelState); - } - - // Divides the multipart document into it's sections and sets up a reader - var boundary = MultipartRequestHelper.GetBoundary(MediaTypeHeaderValue.Parse(Request.ContentType), _defaultFormOptions.MultipartBoundaryLengthLimit); - var reader = new MultipartReader(boundary, HttpContext.Request.Body); - MultipartSection? section = null; - try { - section = await reader.ReadNextSectionAsync(); - } catch (Exception ex) { - ModelState.AddModelError("Error", "The Request is bad: " + ex.Message); - return BadRequest(ModelState); - } - - while (section != null) { - // Multipart document content disposition header read for a section: - // Starts with boundary, contains field name, content-dispo, filename, content-type - var hasContentDispositionHeader = ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out var contentDisposition); - - if (contentDisposition != null && contentDisposition.Name == "__RequestVerificationToken") { - try { - section = await reader.ReadNextSectionAsync(); - } catch (Exception ex) { - ModelState.AddModelError("Error", "The Request is bad: " + ex.Message); - } - continue; - } - - if (hasContentDispositionHeader && contentDisposition != null) { - // Checks if it is a section with content-disposition, name, filename - if (!MultipartRequestHelper.HasFileContentDisposition(contentDisposition)) { - ModelState.AddModelError("Error", $"Wrong Content-Dispostion Headers in Multipart Document"); - return BadRequest(ModelState); - } - - //// 2. Stage: Check File. Sanity checks on the file on a byte level, extension checking, is it empty etc. 
- var streamedFileContent = await XMLFileHelpers.ProcessStreamedFile( - section, contentDisposition, ModelState, - _permittedExtensions, _fileSizeLimit); - if (!ModelState.IsValid || streamedFileContent == null) - return BadRequest(ModelState); - - //// 3. Stage: Valid XML checking using a simple XDocument.Load() - var xdocument = await XDocumentFileHelper.ProcessStreamedFile(streamedFileContent, ModelState); - if (!ModelState.IsValid || xdocument == null) - return UnprocessableEntity(ModelState); - - //// 4. Stage: Is it a Hamann-Document? What kind? - var retdocs = _xmlService.ProbeFile(xdocument, ModelState); - if (!ModelState.IsValid || retdocs == null || !retdocs.Any()) - return UnprocessableEntity(ModelState); - - //// 5. Stage: Saving the File(s) - foreach (var doc in retdocs) { - // Physical saving - await _xmlProvider.Save(doc, _targetFilePath, ModelState); - // Setting the new docuemnt as used - _xmlService.Use(doc); - // Unsetting all old docuemnts as ununsed - _xmlService.AutoUse(doc.Prefix); - if (!ModelState.IsValid) return StatusCode(500, ModelState); - if (docs == null) docs = new List(); - docs.Add(doc); - } - xdocument = null; - retdocs = null; - streamedFileContent = null; - } - - try { - section = await reader.ReadNextSectionAsync(); - } catch (Exception ex) { - ModelState.AddModelError("Error", "The Request is bad: " + ex.Message); - return BadRequest(ModelState); - } - } - - // 6. Stage: Success! 
Returning Ok, and redirecting - JsonSerializerOptions options = new() { - ReferenceHandler = ReferenceHandler.Preserve, - Converters = { - new IdentificationStringJSONConverter() - } - }; - - string json = JsonSerializer.Serialize(docs); - _testService.Test(); - return Created(nameof(UploadController), json); - } - - - //// PUBLISH //// - [HttpPost] - [Route("API/LocalPublish")] - [DisableFormValueModelBinding] - [ValidateAntiForgeryToken] - [FeatureGate(Features.LocalPublishService, Features.AdminService, Features.UploadService)] - public async Task LocalPublish() { - var element = _xmlService.MergeUsedDocuments(ModelState); - if (!ModelState.IsValid || element == null) - return BadRequest(ModelState); - var savedfile = await _xmlProvider.SaveHamannFile(element, _targetFilePath, ModelState); - if (!ModelState.IsValid || savedfile == null) { - if (savedfile != null) - _xmlProvider.DeleteHamannFile(savedfile.Name); - return BadRequest(ModelState); - } - _ = _lib.SetLibrary(savedfile.PhysicalPath, ModelState); - if (!ModelState.IsValid) { - _xmlProvider.DeleteHamannFile(savedfile.Name); - return BadRequest(ModelState); - } - _xmlProvider.SetInProduction(savedfile); - _xmlService.SetInProduction(); - return Created("/", _xmlProvider.GetHamannFiles()); - } - - [HttpPost] - [Route("API/SetUsed/{id}")] - [DisableFormValueModelBinding] - [ValidateAntiForgeryToken] - [FeatureGate(Features.UploadService, Features.AdminService)] - public async Task SetUsed(string id) { - var f = _xmlProvider.GetFiles(id); - if (f == null) { - ModelState.AddModelError("Error", "Wrong Endpoint"); - return BadRequest(ModelState); - } - - var files = f.GetFileList(); - if (files == null) { - ModelState.AddModelError("Error", "Wrong Endpoint"); - return BadRequest(ModelState); - } - - List? 
newUsed = null; - - if (!MultipartRequestHelper.IsMultipartContentType(Request.ContentType)) { - ModelState.AddModelError("Error", $"Wrong / No Content Type on the Request"); - return BadRequest(ModelState); - } - - // Same as above, check Upload() - var boundary = MultipartRequestHelper.GetBoundary(MediaTypeHeaderValue.Parse(Request.ContentType), _defaultFormOptions.MultipartBoundaryLengthLimit); - var reader = new MultipartReader(boundary, HttpContext.Request.Body); - MultipartSection? section = null; - try { - section = await reader.ReadNextSectionAsync(); - } catch (Exception ex) { - ModelState.AddModelError("Error", "The Request is bad: " + ex.Message); - return BadRequest(ModelState); - } - - while (section != null) { - var hasContentDispositionHeader = ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out var contentDisposition); - - if (contentDisposition != null && contentDisposition.Name == "__RequestVerificationToken") { - try { - section = await reader.ReadNextSectionAsync(); - } catch (Exception ex) { - ModelState.AddModelError("Error", "The Request is bad: " + ex.Message); - } - continue; - } - - var filename = string.Empty; - if (hasContentDispositionHeader && contentDisposition != null) { - if (!MultipartRequestHelper.HasFormDataContentDisposition(contentDisposition)) { - ModelState.AddModelError("Error", $"Wrong Content-Dispostion Headers in Multipart Document"); - return BadRequest(ModelState); - } - - filename = XMLFileHelpers.StreamToString(section.Body, ModelState); - if (!ModelState.IsValid) return BadRequest(ModelState); - - var isFile = files.Where(x => x.FileName == filename); - if (isFile == null || !isFile.Any()) { - ModelState.AddModelError("Error", "Tried to add a file that does not exist."); - return BadRequest(ModelState); - } - - if (newUsed == null) newUsed = new List(); - newUsed.Add(isFile.First()); - } - - try { - section = await reader.ReadNextSectionAsync(); - } catch (Exception ex) { - 
ModelState.AddModelError("Error", "The Request is bad: " + ex.Message); - return BadRequest(ModelState); - } - } - - _xmlService.UnUse(id); - if (newUsed != null && newUsed.Any()) { - newUsed.ForEach(x => _xmlService.Use(x)); - } - - _testService.Test(); - return Created("/", newUsed); - } - - [HttpPost] [Route("API/SetInProduction")] [DisableFormValueModelBinding] [ValidateAntiForgeryToken] - [FeatureGate(Features.UploadService, Features.AdminService)] + [FeatureGate(Features.LocalPublishService, Features.AdminService)] public async Task SetInProduction() { var hF = _xmlProvider.GetHamannFiles(); if (hF == null) { @@ -334,31 +96,18 @@ public class APIController : Controller { } if (filename == null) { - ModelState.AddModelError("Error", "No filename given"); + ModelState.AddModelError("Error", "Kein Dateiname."); return BadRequest(ModelState); } var newFile = hF.Where(x => x.Name == filename); if (newFile == null || !newFile.Any()) { - ModelState.AddModelError("Error", "Trying to set a unavailable file."); + ModelState.AddModelError("Error", "Versuch, auf eine unverfügbare Datei zuzugreifen."); return BadRequest(ModelState); } - _ = _lib.SetLibrary(newFile.First().PhysicalPath, ModelState); + _ = _lib.SetLibrary(newFile.First(), null, ModelState); if (!ModelState.IsValid) return BadRequest(ModelState); - - _xmlProvider.SetInProduction(newFile.First()); - return Created("/", newFile.First()); } - - - [HttpPost] - [Route("API/SetYearSetting")] - [ValidateAntiForgeryToken] - [FeatureGate(Features.UploadService, Features.AdminService)] - public async Task? 
SetEndYear(YearSetting startendyear) { - _lib.SetEndYear(startendyear.EndYear); - return Created("/", "");; - } } \ No newline at end of file diff --git a/HaWeb/Controllers/AdminController.cs b/HaWeb/Controllers/AdminController.cs index ce3f1d2..89b76ae 100644 --- a/HaWeb/Controllers/AdminController.cs +++ b/HaWeb/Controllers/AdminController.cs @@ -18,6 +18,6 @@ public class AdminController : Controller { [Route("Admin")] [FeatureGate(Features.AdminService)] public IActionResult Index() { - return Redirect("/Admin/Upload"); + return Redirect("/Admin/XMLState"); } } \ No newline at end of file diff --git a/HaWeb/Controllers/BriefeContoller.cs b/HaWeb/Controllers/BriefeContoller.cs index 0fba25e..e03b0f0 100644 --- a/HaWeb/Controllers/BriefeContoller.cs +++ b/HaWeb/Controllers/BriefeContoller.cs @@ -143,7 +143,7 @@ public class Briefecontroller : Controller { } - private static List<(string Sender, string Receiver)> generateSendersRecievers(List? senders, List? receivers, bool generatePersonLinks) { + private static List<(string Sender, string Receiver)>? generateSendersRecievers(List? senders, List? 
receivers, bool generatePersonLinks) { var res = new List<(string Sender, string Receiver)>(); if (senders == null || receivers == null) return null; if (!generatePersonLinks) { diff --git a/HaWeb/Controllers/IndexController.cs b/HaWeb/Controllers/IndexController.cs index 6fdc14f..9514eda 100644 --- a/HaWeb/Controllers/IndexController.cs +++ b/HaWeb/Controllers/IndexController.cs @@ -13,11 +13,11 @@ namespace HaWeb.Controllers; public class IndexController : Controller { private IHaDocumentWrappper _lib; private IReaderService _readerService; - private IXMLService _xmlService; + private IXMLInteractionService _xmlService; private int _lettersForPage; private int _endYear; - public IndexController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService service, IConfiguration config) { + public IndexController(IXMLFileProvider _, IHaDocumentWrappper lib, IReaderService readerService, IXMLInteractionService service, IConfiguration config) { _lib = lib; _readerService = readerService; _xmlService = service; diff --git a/HaWeb/Controllers/SucheController.cs b/HaWeb/Controllers/SucheController.cs index 91beb00..17ee90c 100644 --- a/HaWeb/Controllers/SucheController.cs +++ b/HaWeb/Controllers/SucheController.cs @@ -18,10 +18,10 @@ namespace HaWeb.Controllers; public class SucheController : Controller { private IHaDocumentWrappper _lib; private IReaderService _readerService; - private IXMLService _xmlService; + private IXMLInteractionService _xmlService; private int _lettersForPage; - public SucheController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService service, IConfiguration config) { + public SucheController(IHaDocumentWrappper lib, IReaderService readerService, IXMLInteractionService service, IConfiguration config) { _lib = lib; _readerService = readerService; _xmlService = service; @@ -206,7 +206,7 @@ public class SucheController : Controller { string activeSearch, SearchType ST, SearchResultType SRT, - List comments) { + List? 
comments) { // Model init & return var model = new SucheViewModel(ST, SRT, null, 0, null, activeSearch, null, null, comments, null); return View("~/Views/HKB/Dynamic/Suche.cshtml", model); diff --git a/HaWeb/Controllers/UploadController.cs b/HaWeb/Controllers/UploadController.cs deleted file mode 100644 index f6d2f7d..0000000 --- a/HaWeb/Controllers/UploadController.cs +++ /dev/null @@ -1,107 +0,0 @@ -namespace HaWeb.Controllers; -using Microsoft.AspNetCore.Mvc; -using HaDocument.Interfaces; -using HaXMLReader.Interfaces; -using Microsoft.FeatureManagement.Mvc; -using System.Runtime.InteropServices; -using Microsoft.AspNetCore.Http.Features; -using Microsoft.Extensions.Configuration; -using HaWeb.Filters; -using HaWeb.XMLParser; -using HaWeb.Models; -using HaWeb.FileHelpers; -using Microsoft.AspNetCore.Mvc.Rendering; - -public class UploadController : Controller { - // DI - private IHaDocumentWrappper _lib; - private IReaderService _readerService; - private readonly long _fileSizeLimit; - private readonly string _targetFilePath; - private readonly IXMLService _xmlService; - private readonly IXMLProvider _xmlProvider; - - // Options - private static readonly string[] _permittedExtensions = { ".xml" }; - private static readonly FormOptions _defaultFormOptions = new FormOptions(); - - - public UploadController(IHaDocumentWrappper lib, IReaderService readerService, IXMLService xmlService, IXMLProvider xmlProvider, IConfiguration config) { - _lib = lib; - _readerService = readerService; - _xmlService = xmlService; - _xmlProvider = xmlProvider; - _fileSizeLimit = config.GetValue("FileSizeLimit"); - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { - _targetFilePath = config.GetValue("StoredFilePathWindows"); - } else { - _targetFilePath = config.GetValue("StoredFilePathLinux"); - } - } - - [HttpGet] - [Route("Admin/Upload/{id?}")] - [FeatureGate(Features.AdminService)] - [GenerateAntiforgeryTokenCookie] - public IActionResult Index(string? 
id) { - var library = _lib.GetLibrary(); - var roots = _xmlService.GetRootsList(); - if (roots == null) return error404(); - - var hF = _xmlProvider.GetHamannFiles(); - List? hamannFiles = null; - if (hF != null) - hamannFiles = hF - .OrderByDescending(x => x.LastModified) - .Select(x => new FileModel(x.Name, string.Empty, x.LastModified.LocalDateTime, false, x == _xmlProvider.GetInProduction())) - .ToList(); - - var uF = _xmlService.GetUsedDictionary(); - var pF = _xmlService.GetInProduction(); - - Dictionary?>? usedFiles = null; - if (uF != null) { - usedFiles = new Dictionary?>(); - foreach (var kv in uF) { - if (kv.Value == null) continue; - usedFiles.Add(kv.Key, XMLFileHelpers.ToFileModel(kv.Value, pF, uF)); - } - } - - Dictionary?>? productionFiles = null; - if (pF != null) { - productionFiles = new Dictionary?>(); - foreach (var kv in pF) { - if (kv.Value == null) continue; - productionFiles.Add(kv.Key, XMLFileHelpers.ToFileModel(kv.Value, pF, uF)); - } - } - - var availableYears = library.MetasByYear.Select(x => x.Key).Union(library.ExcludedMetasByYear.Select(x => x.Key)).ToList(); - availableYears.Sort(); - if (id != null) { - id = id.ToLower(); - - var root = _xmlService.GetRoot(id); - if (root == null) return error404(); - - var model = new UploadViewModel(root.Type, id, roots, usedFiles, _lib.GetStartYear(), _lib.GetEndYear(), availableYears); - model.ProductionFiles = productionFiles; - model.HamannFiles = hamannFiles; - model.AvailableFiles = XMLFileHelpers.ToFileModel(_xmlProvider.GetFiles(id), pF, uF); - - return View("~/Views/Admin/Dynamic/Upload.cshtml", model); - } else { - var model = new UploadViewModel("Upload & Veröffentlichen", id, roots, usedFiles, _lib.GetStartYear(), _lib.GetEndYear(), availableYears); - model.ProductionFiles = productionFiles; - model.HamannFiles = hamannFiles; - - return View("~/Views/Admin/Dynamic/Upload.cshtml", model); - } - } - - private IActionResult error404() { - Response.StatusCode = 404; - return 
Redirect("/Error404"); - } -} \ No newline at end of file diff --git a/HaWeb/Controllers/XMLStateController.cs b/HaWeb/Controllers/XMLStateController.cs new file mode 100644 index 0000000..0545695 --- /dev/null +++ b/HaWeb/Controllers/XMLStateController.cs @@ -0,0 +1,50 @@ +namespace HaWeb.Controllers; +using Microsoft.AspNetCore.Mvc; +using Microsoft.FeatureManagement.Mvc; +using HaWeb.Filters; +using HaWeb.XMLParser; +using HaWeb.Models; +using HaWeb.FileHelpers; +using HaWeb.BackgroundTask; + +[FeatureGate(Features.AdminService)] +public class XMLStateController : Controller { + // DI + private IHaDocumentWrappper _lib; + private readonly IXMLInteractionService _xmlService; + private readonly IXMLFileProvider _xmlProvider; + private readonly IMonitorLoop _loop; + public XMLStateController(IMonitorLoop loop, IHaDocumentWrappper lib, IXMLInteractionService xmlService, IXMLFileProvider xmlProvider) { + _lib = lib; + _xmlService = xmlService; + _xmlProvider = xmlProvider; + _loop = loop; + } + + [HttpGet] + [Route("Admin/XMLState/")] + [FeatureGate(Features.AdminService)] + [GenerateAntiforgeryTokenCookie] + public IActionResult Index() { + _loop.StartMonitorLoop(); + var library = _lib.GetLibrary(); + var roots = _xmlService.GetRootsList(); + if (roots == null) return error404(); + + var hF = _xmlProvider.GetHamannFiles()?.OrderByDescending(x => x.LastModified).ToList(); + var mF = _xmlService.GetManagedFiles(); + var gD = _xmlProvider.GetGitData(); + var activeF = _lib.GetActiveFile(); + var vS = _xmlService.GetValidState(); + + var model = new XMLStateViewModel("Dateiübersicht", gD, roots, hF, mF, vS) { + ActiveFile = activeF, + }; + return View("~/Views/Admin/Dynamic/XMLState.cshtml", model); + } + + private IActionResult error404() { + Response.StatusCode = 404; + return Redirect("/Error404"); + } +} \ No newline at end of file diff --git a/HaWeb/FileHelpers/ConfigurationMonitor.cs b/HaWeb/FileHelpers/ConfigurationMonitor.cs new file mode 100644 index 
0000000..530056a --- /dev/null +++ b/HaWeb/FileHelpers/ConfigurationMonitor.cs @@ -0,0 +1,85 @@ +using System.Timers; + +namespace HaWeb.FileHelpers; + +public class ConfigurationMonitor { + private System.Timers.Timer? _timer; + private (string, byte[])[]? _h; + private IServiceProvider _serviceProvider; + + public ConfigurationMonitor(string[] paths, IServiceProvider services) { + _h = _getHash(paths); + _serviceProvider = services; + } + + private static (string, byte[])[]? _getHash(string[] paths) { + if (paths == null || !paths.Any()) return null; + var ret = new List<(string, byte[])>(); + foreach(var c in paths) + ret.Add((c, _computeHash(c))); + return ret.ToArray(); + } + + private bool isEqual((string, byte[])[]? _h1, (string, byte[])[]? _h2) { + if (_h1 == null && _h2 == null) return true; + if (_h1 == null && _h2 != null) return false; + if (_h2 == null && _h1 != null) return false; + if (_h1!.Count() != _h2!.Count()) return false; + foreach (var h1 in _h1!) { + foreach (var h2 in _h2!) 
{ + if (h1.Item1 == h2.Item1 && !Enumerable.SequenceEqual(h1.Item2, h2.Item2)) return false; + } + } + return true; + } + + public void InvokeChanged(string[] paths) { + var h = _getHash(paths); + if (_timer == null && !isEqual(h, _h)) { + _h = h; + _timer = new(5000) { AutoReset = false }; + _timer.Enabled = true; + _timer.Elapsed += Action; + } + } + + private void Action(Object source, System.Timers.ElapsedEventArgs e) { + Console.WriteLine("Configuration changed (ConfigurationMonitor Class)"); + using IServiceScope serviceScope = _serviceProvider.CreateScope(); + IServiceProvider provider = serviceScope.ServiceProvider; + + var cP = provider.GetRequiredService(); + var hP = provider.GetRequiredService(); + hP.ParseConfiguration(cP); + var fP = provider.GetRequiredService(); + fP.Reload(cP); + + // _lifetime.StopApplication(); + _timer = null; + } + + private static byte[] _computeHash(string filePath) { + var runCount = 1; + + while(runCount < 4) { + try { + if (File.Exists(filePath)) + using (var fs = File.OpenRead(filePath)) { + return System.Security.Cryptography.SHA1 + .Create().ComputeHash(fs); + } + else { + throw new FileNotFoundException(); + } + } + catch (IOException ex) { + if (runCount == 3) + throw; + + Thread.Sleep(TimeSpan.FromSeconds(Math.Pow(2, runCount))); + runCount++; + } + } + return new byte[20]; + } +} \ No newline at end of file diff --git a/HaWeb/FileHelpers/HaDocumentWrapper.cs b/HaWeb/FileHelpers/HaDocumentWrapper.cs index caeeac1..077d25c 100644 --- a/HaWeb/FileHelpers/HaDocumentWrapper.cs +++ b/HaWeb/FileHelpers/HaDocumentWrapper.cs @@ -9,12 +9,13 @@ using HaXMLReader.Interfaces; using HaWeb.SearchHelpers; using HaWeb.XMLParser; using System.Text; +using System.Xml.Linq; +using System.Diagnostics; public class HaDocumentWrapper : IHaDocumentWrappper { - private ILibrary Library; - private IXMLProvider _xmlProvider; - private IXMLService _xmlService; - private string _filepath; + private IFileInfo _ActiveFile; + private ILibrary? 
Library; + private IXMLInteractionService _xmlService; private int _startYear; private int _endYear; private List? _availablePersons; @@ -22,23 +23,14 @@ public class HaDocumentWrapper : IHaDocumentWrappper { // public List? SearchableLetters { get; private set; } - public HaDocumentWrapper(IXMLProvider xmlProvider, IXMLService service, IConfiguration configuration) { - _xmlProvider = xmlProvider; + public HaDocumentWrapper(IXMLInteractionService service, IConfiguration configuration) { _xmlService = service; + ParseConfiguration(configuration); + } + + public void ParseConfiguration(IConfiguration configuration) { _startYear = configuration.GetValue("AvailableStartYear"); _endYear = configuration.GetValue("AvailableEndYear"); - var filelist = xmlProvider.GetHamannFiles(); - if (filelist != null && filelist.Any()) { - _AutoLoad(filelist); - } - - // Use Fallback library - if (Library == null) { - var options = new HaWeb.Settings.HaDocumentOptions(); - if (SetLibrary(options.HamannXMLFilePath) == null) { - throw new Exception("Die Fallback Hamann.xml unter " + options.HamannXMLFilePath + " kann nicht geparst werden."); - } - } } public List? GetAvailablePersons() => _availablePersons; @@ -49,26 +41,29 @@ public class HaDocumentWrapper : IHaDocumentWrappper { public int GetEndYear() => _endYear; - public void SetEndYear(int end) { - this._endYear = end; - SetLibrary(_filepath); - } + public IFileInfo GetActiveFile() => _ActiveFile; - public ILibrary? SetLibrary(string filepath, ModelStateDictionary? ModelState = null) { - // 1. Set ILibrary + public ILibrary? SetLibrary(IFileInfo? file, XDocument? doc, ModelStateDictionary? ModelState = null) { + // Handle null on file & doc + var path = file == null ? new HaWeb.Settings.HaDocumentOptions().HamannXMLFilePath : file.PhysicalPath; + if (doc == null) doc = XDocument.Load(path, LoadOptions.PreserveWhitespace); + + // 1. 
Parse the Document, create search Index + if (_xmlService != null) + _xmlService.CreateSearchables(doc); + // 2. Set ILibrary try { - Library = HaDocument.Document.Create(new HaWeb.Settings.HaDocumentOptions() { HamannXMLFilePath = filepath, AvailableYearRange = (_startYear, _endYear) }); + Library = HaDocument.Document.Create(new HaWeb.Settings.HaDocumentOptions() { HamannXMLFilePath = path, AvailableYearRange = (_startYear, _endYear) }, doc.Root); } catch (Exception ex) { if (ModelState != null) ModelState.AddModelError("Error", "Das Dokument konnte nicht geparst werden: " + ex.Message); return null; } - // 1a. Set Available Persons + // 3a. Set Available Persons var persons = Library.Metas.SelectMany(x => x.Value.Senders.Union(x.Value.Receivers)).Distinct(); _availablePersons = persons.Select(x => Library.Persons[x]).OrderBy(x => x.Surname).ThenBy(x => x.Prename).ToList(); - // 1b. Setup a Dictionary with available Person ovierview Pages - + // 3b. Setup a Dictionary with available Person ovierview Pages _personsWithLetters = new Dictionary(); var availablePersonPages = Library.Persons.Where(x => !String.IsNullOrWhiteSpace(x.Value.Komm)); foreach (var p in availablePersonPages) { @@ -77,30 +72,12 @@ public class HaDocumentWrapper : IHaDocumentWrappper { } } - // 2. Set Library in Production, collect some Objects - if (_xmlService != null) - _xmlService.SetInProduction(System.Xml.Linq.XDocument.Load(filepath, System.Xml.Linq.LoadOptions.PreserveWhitespace)); - - // 3. Set Filepath - _filepath = filepath; + // 4. Set info on loaded file + _ActiveFile = file; return Library; } - public ILibrary GetLibrary() { + public ILibrary? 
GetLibrary() { return Library; } - - private void _AutoLoad(List files) { - var orderdlist = files.OrderByDescending(x => x.LastModified); - foreach (var item in orderdlist) { - if (SetLibrary(item.PhysicalPath) != null) { - _xmlProvider.SetInProduction(item); - return; - } - } - } - - private string _prepareSearch(HaDocument.Interfaces.ISearchable objecttoseach) { - return SearchHelpers.StringHelpers.NormalizeWhiteSpace(objecttoseach.Element, ' ', false); - } } \ No newline at end of file diff --git a/HaWeb/FileHelpers/IConfigurationMonitor.cs b/HaWeb/FileHelpers/IConfigurationMonitor.cs new file mode 100644 index 0000000..788d0b7 --- /dev/null +++ b/HaWeb/FileHelpers/IConfigurationMonitor.cs @@ -0,0 +1,6 @@ +namespace HaWeb.FileHelpers; + +public interface IConfigurationMonitor +{ + +} \ No newline at end of file diff --git a/HaWeb/FileHelpers/IHaDocumentWrapper.cs b/HaWeb/FileHelpers/IHaDocumentWrapper.cs index b8732fe..149d4ae 100644 --- a/HaWeb/FileHelpers/IHaDocumentWrapper.cs +++ b/HaWeb/FileHelpers/IHaDocumentWrapper.cs @@ -3,14 +3,16 @@ using HaDocument.Interfaces; using HaDocument.Models; using Microsoft.AspNetCore.Mvc.ModelBinding; using HaXMLReader.Interfaces; +using Microsoft.Extensions.FileProviders; +using System.Xml.Linq; public interface IHaDocumentWrappper { - public ILibrary? SetLibrary(string filepath, ModelStateDictionary ModelState); - public ILibrary GetLibrary(); - + public ILibrary? SetLibrary(IFileInfo? file, XDocument? doc, ModelStateDictionary? ModelState); + public ILibrary? GetLibrary(); + public void ParseConfiguration(IConfiguration configuration); public int GetStartYear(); public int GetEndYear(); + public IFileInfo GetActiveFile(); public List? GetAvailablePersons(); public Dictionary? 
GetPersonsWithLetters(); - public void SetEndYear(int end); } \ No newline at end of file diff --git a/HaWeb/FileHelpers/IXMLFileProvider.cs b/HaWeb/FileHelpers/IXMLFileProvider.cs new file mode 100644 index 0000000..e308caf --- /dev/null +++ b/HaWeb/FileHelpers/IXMLFileProvider.cs @@ -0,0 +1,17 @@ +namespace HaWeb.FileHelpers; +using Microsoft.Extensions.FileProviders; +using System.Xml.Linq; +using HaWeb.Models; + +using Microsoft.AspNetCore.Mvc.ModelBinding; + +public interface IXMLFileProvider { + public List? GetWorkingTreeFiles(); + public IFileInfo? SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary ModelState); + public List? GetHamannFiles(); + public (DateTime PullTime, string Hash)? GetGitData(); + public void Reload(IConfiguration config); + public bool HasChanged(); + public void DeleteHamannFile(string filename); + public void Scan(); +} \ No newline at end of file diff --git a/HaWeb/FileHelpers/IXMLProvider.cs b/HaWeb/FileHelpers/IXMLProvider.cs deleted file mode 100644 index e411d6c..0000000 --- a/HaWeb/FileHelpers/IXMLProvider.cs +++ /dev/null @@ -1,16 +0,0 @@ -namespace HaWeb.FileHelpers; -using Microsoft.Extensions.FileProviders; -using System.Xml.Linq; -using HaWeb.Models; - -using Microsoft.AspNetCore.Mvc.ModelBinding; - -public interface IXMLProvider { - public FileList? GetFiles(string prefix); - public Task Save(XMLRootDocument doc, string basefilepath, ModelStateDictionary ModelState); - public Task SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary ModelState); - public List? GetHamannFiles(); - public IFileInfo? 
GetInProduction(); - public void SetInProduction(IFileInfo info); - public void DeleteHamannFile(string filename); -} \ No newline at end of file diff --git a/HaWeb/FileHelpers/XDocumentFileHelpers.cs b/HaWeb/FileHelpers/XDocumentFileHelpers.cs deleted file mode 100644 index 70ad09b..0000000 --- a/HaWeb/FileHelpers/XDocumentFileHelpers.cs +++ /dev/null @@ -1,32 +0,0 @@ -namespace HaWeb.FileHelpers; -using System.Xml.Linq; -using Microsoft.AspNetCore.Mvc.ModelBinding; -using System.Text; -using System.Xml; - -public static class XDocumentFileHelper { - - private readonly static XmlReaderSettings _Settings = new XmlReaderSettings() { - CloseInput = true, - CheckCharacters = false, - ConformanceLevel = ConformanceLevel.Fragment, - IgnoreComments = true, - IgnoreProcessingInstructions = true, - IgnoreWhitespace = false - }; - - public static async Task ProcessStreamedFile(byte[] bytes, ModelStateDictionary modelState) { - try { - using (var stream = new MemoryStream(bytes)) { - using (var xmlreader = XmlReader.Create(stream, _Settings)) { - return XDocument.Load(xmlreader, LoadOptions.PreserveWhitespace | LoadOptions.SetLineInfo); - - } - } - } catch (Exception ex) { - modelState.AddModelError("Error", $"Kein gültiges XML-Dokument geladen. Error: {ex.Message}"); - } - - return null; - } -} \ No newline at end of file diff --git a/HaWeb/FileHelpers/XMLFileHelpers.cs b/HaWeb/FileHelpers/XMLFileHelpers.cs index e2c9d75..d6cf82b 100644 --- a/HaWeb/FileHelpers/XMLFileHelpers.cs +++ b/HaWeb/FileHelpers/XMLFileHelpers.cs @@ -54,152 +54,54 @@ public static class XMLFileHelpers { } }; - // Unused as of rn, used to take a file and do the same sanity checks as below - // public static async Task ProcessFormFile(IFormFile formFile, ModelStateDictionary modelState, string[] permittedExtensions, long sizeLimit) - // { - // var fieldDisplayName = string.Empty; - - // // Use reflection to obtain the display name for the model - // // property associated with this IFormFile. 
If a display - // // name isn't found, error messages simply won't show - // // a display name. - // MemberInfo property = - // typeof(T).GetProperty( - // formFile.Name.Substring(formFile.Name.IndexOf(".", - // StringComparison.Ordinal) + 1)); - - // if (property != null) - // { - // if (property.GetCustomAttribute(typeof(DisplayAttribute)) is - // DisplayAttribute displayAttribute) - // { - // fieldDisplayName = $"{displayAttribute.Name} "; - // } - // } - - // // Don't trust the file name sent by the client. To display - // // the file name, HTML-encode the value. - // var trustedFileNameForDisplay = WebUtility.HtmlEncode( - // formFile.FileName); - - // // Check the file length. This check doesn't catch files that only have - // // a BOM as their content. - // if (formFile.Length == 0) - // { - // modelState.AddModelError(formFile.Name, - // $"{fieldDisplayName}({trustedFileNameForDisplay}) is empty."); - - // return Array.Empty(); - // } - - // if (formFile.Length > sizeLimit) - // { - // var megabyteSizeLimit = sizeLimit / 1048576; - // modelState.AddModelError(formFile.Name, - // $"{fieldDisplayName}({trustedFileNameForDisplay}) exceeds " + - // $"{megabyteSizeLimit:N1} MB."); - - // return Array.Empty(); - // } - - // try - // { - // using (var memoryStream = new MemoryStream()) - // { - // await formFile.CopyToAsync(memoryStream); - - // // Check the content length in case the file's only - // // content was a BOM and the content is actually - // // empty after removing the BOM. 
- // if (memoryStream.Length == 0) - // { - // modelState.AddModelError(formFile.Name, - // $"{fieldDisplayName}({trustedFileNameForDisplay}) is empty."); - // } - - // if (!IsValidFileExtensionAndSignature( - // formFile.FileName, memoryStream, permittedExtensions)) - // { - // modelState.AddModelError(formFile.Name, - // $"{fieldDisplayName}({trustedFileNameForDisplay}) file " + - // "type isn't permitted or the file's signature " + - // "doesn't match the file's extension."); - // } - // else - // { - // return memoryStream.ToArray(); - // } - // } - // } - // catch (Exception ex) - // { - // modelState.AddModelError(formFile.Name, - // $"{fieldDisplayName}({trustedFileNameForDisplay}) upload failed. " + - // $"Please contact the Help Desk for support. Error: {ex.HResult}"); - // } - - // return Array.Empty(); + // public static List? ToFileModel(FileList? fileList) { + // if (fileList == null) return null; + // var fL = fileList.GetFileList(); + // if (fL == null) return null; + // var ret = new List(); + // foreach (var f in fL) { + // if (f.File == null) continue; + // ret.Add(ToFileModel(f)); + // }; + // return ret.OrderBy(x => x.LastModified).ToList(); // } - public static List? ToFileModel(FileList? fileList, Dictionary? productionFiles = null, Dictionary? 
usedFiles = null) { - if (fileList == null) return null; - var fL = fileList.GetFileList(); - if (fL == null) return null; - var ret = new List(); - foreach (var f in fL) { - if (f.File == null) continue; - ret.Add(ToFileModel(f, productionFiles, usedFiles)); - }; - return ret.OrderBy(x => x.LastModified).ToList(); - } + // // TODO: File State IsValid + // public static FileModel ToFileModel(XMLRootDocument document) { + // string id = document.Prefix; + // var model = new FileModel(document.FileName, document.File.LastModified.LocalDateTime, true) { + // Fields = document.Fields, + // Messages = document.GetLog(), + // Prefix = id + // }; + // return model; + // } - public static FileModel ToFileModel(XMLRootDocument document, Dictionary? productionFiles = null, Dictionary? usedFiles = null) { - string id = document.Prefix; - - bool inProduction = false; - if (productionFiles != null && productionFiles.ContainsKey(id)) { - inProduction = productionFiles[id]!.Contains(document); - } - - bool isUsed = false; - if (usedFiles != null && usedFiles.ContainsKey(id)) { - isUsed = usedFiles[id]!.Contains(document); - } - - var model = new FileModel(document.FileName, document.Prefix, document.File.LastModified.LocalDateTime, isUsed, inProduction) { Fields = document.Fields }; - model.Messages = document.GetLog(); - return model; - } - - public static async Task ProcessStreamedFile( - MultipartSection section, ContentDispositionHeaderValue contentDisposition, - ModelStateDictionary modelState, string[] permittedExtensions, long sizeLimit) { + public static bool ProcessFile( + Stream file, + string fileName, + StringBuilder errorMessages, + string[] permittedExtensions, + long sizeLimit) { try { - using (var memoryStream = new MemoryStream()) { - await section.Body.CopyToAsync(memoryStream); - - // Check if the file is empty or exceeds the size limit. 
- if (memoryStream.Length == 0) - modelState.AddModelError("Error", "The file is empty."); - else if (memoryStream.Length > sizeLimit) { - var megabyteSizeLimit = sizeLimit / 1048576; - modelState.AddModelError("Error", $"The file exceeds {megabyteSizeLimit:N1} MB."); - } - - // Check file extension and first bytes - else if (!IsValidFileExtensionAndSignature(contentDisposition.FileName.Value, memoryStream, permittedExtensions)) - modelState.AddModelError("Error", "The file must be of the following specs:
" + - "1. The file must hava a .xml File-Extension
" + - "2. To make sure the file isn't executable the file must start with: or "); - - // Return the File as a byte array - else return memoryStream.ToArray(); + // Check if the file is empty or exceeds the size limit. + if (file.Length == 0) { + errorMessages.AppendLine("Die Datei ist leer."); + return false; + } + else if (file.Length > sizeLimit) { + var megabyteSizeLimit = sizeLimit / 1048576; + errorMessages.AppendLine($"Die Datei überschreitet das Größenlimit {megabyteSizeLimit:N1} MB."); + return false; } - } catch (Exception ex) { - modelState.AddModelError("Error", $"The upload failed. Error: {ex.Message}"); - } - return null; + // Return orderly, if signature & extension okay + else return IsValidFileExtensionAndSignature(fileName, file, errorMessages, permittedExtensions); + + } catch (Exception ex) { + errorMessages.AppendLine($"The upload failed. Error: {ex.Message}"); + return false; + } } public static string? StreamToString(System.IO.Stream stream, ModelStateDictionary modelState) { @@ -216,22 +118,26 @@ public static class XMLFileHelpers { } } - private static bool IsValidFileExtensionAndSignature(string fileName, Stream data, string[] permittedExtensions) { + private static bool IsValidFileExtensionAndSignature(string fileName, Stream data, StringBuilder errorMessages, string[] permittedExtensions) { if (string.IsNullOrEmpty(fileName) || data == null || data.Length == 0) return false; var ext = Path.GetExtension(fileName).ToLowerInvariant(); - - if (string.IsNullOrEmpty(ext) || !permittedExtensions.Contains(ext)) + if (string.IsNullOrEmpty(ext) || !permittedExtensions.Contains(ext)) { + errorMessages.AppendLine("Dateiname endet nicht auf .xml"); return false; + } data.Position = 0; - using (var reader = new BinaryReader(data)) { var signatures = _fileSignature[ext]; var headerBytes = reader.ReadBytes(signatures.Max(m => m.Length)); - return signatures.Any(signature => - headerBytes.Take(signature.Length).SequenceEqual(signature)); + if 
(!signatures.Any(signature => + headerBytes.Take(signature.Length).SequenceEqual(signature))) { + errorMessages.AppendLine("Datei muss mit oder beginnen."); + return false; + }; } + return true; } } \ No newline at end of file diff --git a/HaWeb/FileHelpers/XMLFileProvider.cs b/HaWeb/FileHelpers/XMLFileProvider.cs new file mode 100644 index 0000000..6bf89ca --- /dev/null +++ b/HaWeb/FileHelpers/XMLFileProvider.cs @@ -0,0 +1,228 @@ +namespace HaWeb.FileHelpers; +using Microsoft.Extensions.FileProviders; +using Microsoft.AspNetCore.Mvc.ModelBinding; +using HaWeb.Models; +using HaWeb.XMLParser; +using HaWeb.XMLTests; +using System.Xml.Linq; +using System.Runtime.InteropServices; +using System.Diagnostics; + +// XMLProvider provides a wrapper around the available XML data on a FILE basis +public class XMLFileProvider : IXMLFileProvider { + private readonly IHaDocumentWrappper _Lib; + private readonly IXMLInteractionService _XMLService; + + private IFileProvider _hamannFileProvider; + private IFileProvider _bareRepositoryFileProvider; + private IFileProvider _workingTreeFileProvider; + + private string _Branch; + + private List? _WorkingTreeFiles; + private List? _HamannFiles; + + private static (DateTime PullTime, string Hash)? 
_GitData; + + // Startup (LAST) + public XMLFileProvider(IXMLInteractionService xmlservice, IHaDocumentWrappper _lib, IConfiguration config) { + // TODO: Test Read / Write Access + _Lib = _lib; + _XMLService = xmlservice; + + _Branch = config.GetValue("RepositoryBranch"); + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { + _hamannFileProvider = new PhysicalFileProvider(config.GetValue("HamannFileStoreWindows")); + _bareRepositoryFileProvider = new PhysicalFileProvider(config.GetValue("BareRepositoryPathWindows")); + _workingTreeFileProvider = new PhysicalFileProvider(config.GetValue("WorkingTreePathWindows")); + } + else { + _hamannFileProvider = new PhysicalFileProvider(config.GetValue("HamannFileStoreLinux")); + _bareRepositoryFileProvider = new PhysicalFileProvider(config.GetValue("BareRepositoryPathLinux")); + _workingTreeFileProvider = new PhysicalFileProvider(config.GetValue("WorkingTreePathLinux")); + } + + // Create File Lists; Here and in xmlservice, which does preliminary checking + Scan(); + if (_WorkingTreeFiles != null && _WorkingTreeFiles.Any()) { + xmlservice.Collect(_WorkingTreeFiles); + } + _HamannFiles = _ScanHamannFiles(); + + // Check if hamann file already is current working tree status + // -> YES: Load up the file via _lib.SetLibrary(); + if (_IsAlreadyParsed()) { + _Lib.SetLibrary(_HamannFiles.First(), null, null); + if (_Lib.GetLibrary() != null) return; + } + + // -> NO: Try to create a new file + var created = xmlservice.TryCreate(); + if (created != null) { + var file = SaveHamannFile(created, _hamannFileProvider.GetFileInfo("./").PhysicalPath, null); + if (file != null) { + _lib.SetLibrary(file, created.Document, null); + if (_Lib.GetLibrary() != null) return; + } + } + + // It failed, so use the last best File: + else if (_HamannFiles != null && _HamannFiles.Any()) { + _Lib.SetLibrary(_HamannFiles.First(), null, null); + if (_Lib.GetLibrary() != null) return; + } + + // -> There is none? 
Use Fallback: + else { + var options = new HaWeb.Settings.HaDocumentOptions(); + if (_lib.SetLibrary(null, null, null) == null) { + throw new Exception("Die Fallback Hamann.xml unter " + options.HamannXMLFilePath + " kann nicht geparst werden."); + } + } + } + + public void Reload(IConfiguration config) { + _Branch = config.GetValue("RepositoryBranch"); + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { + _hamannFileProvider = new PhysicalFileProvider(config.GetValue("HamannFileStoreWindows")); + _bareRepositoryFileProvider = new PhysicalFileProvider(config.GetValue("BareRepositoryPathWindows")); + _workingTreeFileProvider = new PhysicalFileProvider(config.GetValue("WorkingTreePathWindows")); + } + else { + _hamannFileProvider = new PhysicalFileProvider(config.GetValue("HamannFileStoreLinux")); + _bareRepositoryFileProvider = new PhysicalFileProvider(config.GetValue("BareRepositoryPathLinux")); + _workingTreeFileProvider = new PhysicalFileProvider(config.GetValue("WorkingTreePathLinux")); + } + + // Create File Lists; Here and in xmlservice, which does preliminary checking + Scan(); + if (_WorkingTreeFiles != null && _WorkingTreeFiles.Any()) { + _XMLService.Collect(_WorkingTreeFiles); + } + _HamannFiles = _ScanHamannFiles(); + + // Check if hamann file already is current working tree status + // -> YES: Load up the file via _lib.SetLibrary(); + if (_IsAlreadyParsed()) { + _Lib.SetLibrary(_HamannFiles.First(), null, null); + if (_Lib.GetLibrary() != null) return; + } + + // -> NO: Try to create a new file + var created = _XMLService.TryCreate(); + if (created != null) { + var file = SaveHamannFile(created, _hamannFileProvider.GetFileInfo("./").PhysicalPath, null); + if (file != null) { + _Lib.SetLibrary(file, created.Document, null); + if (_Lib.GetLibrary() != null) return; + } + } + + // It failed, so use the last best File: + else if (_HamannFiles != null && _HamannFiles.Any()) { + _Lib.SetLibrary(_HamannFiles.First(), null, null); + if 
(_Lib.GetLibrary() != null) return; + } + + // -> There is none? Use Fallback: + else { + var options = new HaWeb.Settings.HaDocumentOptions(); + if (_Lib.SetLibrary(null, null, null) == null) { + throw new Exception("Die Fallback Hamann.xml unter " + options.HamannXMLFilePath + " kann nicht geparst werden."); + } + } + } + + // Getters and Setters + public List? GetWorkingTreeFiles() => _WorkingTreeFiles; + + public (DateTime PullTime, string Hash)? GetGitData() => _GitData; + + public List? GetHamannFiles() => this._HamannFiles; + + // Functions + public void DeleteHamannFile(string filename) { + if (_HamannFiles == null) return; + var files = _HamannFiles.Where(x => x.Name == filename); + foreach (var file in files) { + File.Delete(file.PhysicalPath); + } + _HamannFiles.RemoveAll(x => x.Name == filename); + } + + public void Scan() { + _WorkingTreeFiles = _ScanWorkingTreeFiles(); + _GitData = _ScanGitData(); + } + + public IFileInfo? SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary? ModelState) { + if (!_GitData.HasValue) return null; + var filename = "hamann_" + _GitData.Value.PullTime.Year + "-" + _GitData.Value.PullTime.Month + "-" + _GitData.Value.PullTime.Day + "_" + _GitData.Value.PullTime.Hour + "-" + _GitData.Value.PullTime.Minute + "." 
+ _GitData.Value.Hash.Substring(0,7) + ".xml"; + var path = Path.Combine(basefilepath, filename); + + try { + if (!Directory.Exists(basefilepath)) + Directory.CreateDirectory(basefilepath); + using (var targetStream = System.IO.File.Create(path)) + element.Save(targetStream, SaveOptions.DisableFormatting); + } catch (Exception ex) { + if (ModelState != null) ModelState.AddModelError("Error", "Die Datei konnte nicht gespeichert werden: " + ex.Message); + return null; + } + + var info = _hamannFileProvider.GetFileInfo(filename); + if (info == null) { + if (ModelState != null) ModelState.AddModelError("Error", "Auf die neu erstellte Datei konnte nicht zugegriffen werden."); + return null; + } + + if (_HamannFiles == null) _HamannFiles = new List(); + _HamannFiles.RemoveAll(x => x.Name == info.Name); + _HamannFiles.Add(info); + return info; + } + + public bool HasChanged() { + if (!_GitData.HasValue) return true; + var current = _ScanGitData(); + if (current.Item2 != _GitData.Value.Hash) { + _GitData = current; + return true; + } + return false; + } + + private (DateTime, string) _ScanGitData() { + var head = _bareRepositoryFileProvider.GetFileInfo("refs/heads/" + _Branch); + return (head.LastModified.DateTime, File.ReadAllText(head.PhysicalPath)); + } + + private void _RegisterChangeCallbacks() { + var cT = _bareRepositoryFileProvider.Watch("refs/heads/" + _Branch); + } + + // Gets all XML Files + private List? _ScanWorkingTreeFiles() { + var files = _workingTreeFileProvider.GetDirectoryContents(string.Empty)!.Where(x => !x.IsDirectory && x.Name.EndsWith(".xml"))!.ToList(); + return files; + } + + private List? _ScanHamannFiles() { + var files = _hamannFileProvider.GetDirectoryContents(string.Empty).Where(x => !x.IsDirectory && x.Name.StartsWith("hamann") && x.Name.EndsWith(".xml")); + if (files == null || !files.Any()) return null; + return files.OrderByDescending(x => x.LastModified).ToList(); + } + + private string? 
_GetHashFromHamannFilename(string filename) { + var s = filename.Split('.', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (s.Length != 3 || s.Last() != "xml" || !s.First().StartsWith("hamann")) return null; + return s[1]; + } + + private bool _IsAlreadyParsed() { + if (_HamannFiles == null || !_HamannFiles.Any() || !_GitData.HasValue) return false; + var fhash = _GetHashFromHamannFilename(_HamannFiles.First().Name); + var ghash = _GitData.Value.Hash.Substring(0,7); + return fhash == ghash; + } +} \ No newline at end of file diff --git a/HaWeb/FileHelpers/XMLProvider.cs b/HaWeb/FileHelpers/XMLProvider.cs deleted file mode 100644 index 058f741..0000000 --- a/HaWeb/FileHelpers/XMLProvider.cs +++ /dev/null @@ -1,146 +0,0 @@ -namespace HaWeb.FileHelpers; -using Microsoft.Extensions.FileProviders; -using Microsoft.AspNetCore.Mvc.ModelBinding; -using HaWeb.Models; -using HaWeb.XMLParser; -using HaWeb.XMLTests; -using System.Xml.Linq; - -// XMLProvider provides a wrapper around the available XML data on a FILE basis -public class XMLProvider : IXMLProvider { - private IFileProvider _fileProvider; - private Dictionary? _Files; - private Dictionary? _Roots; - private List? _HamannFiles; - private Stack? _InProduction; - - public XMLProvider(IFileProvider provider, IXMLService xmlservice, IXMLTestService testService) { - _fileProvider = provider; - _Roots = xmlservice.GetRootsDictionary(); - _Files = _ScanFiles(); - _HamannFiles = _ScanHamannFiles(); - - if (_Files != null) - foreach (var category in _Files) - if (category.Value != null) - xmlservice.AutoUse(category.Value); - - testService.Test(); - } - - public List? GetHamannFiles() => this._HamannFiles; - - public IFileInfo? 
GetInProduction() { - if (_InProduction == null || !_InProduction.Any()) return null; - return this._InProduction.Peek(); - } - - public void DeleteHamannFile(string filename) { - if (_HamannFiles == null) return; - var files = _HamannFiles.Where(x => x.Name == filename); - foreach (var file in files) { - File.Delete(file.PhysicalPath); - } - _HamannFiles.RemoveAll(x => x.Name == filename); - } - - public void SetInProduction(IFileInfo info) { - if (_InProduction == null) _InProduction = new Stack(); - _InProduction.Push(info); - } - - public FileList? GetFiles(string prefix) - => _Files != null && _Files.ContainsKey(prefix) ? _Files[prefix] : null; - - // Saves a Document as file and adds it to the collection - public async Task Save(XMLRootDocument doc, string basefilepath, ModelStateDictionary ModelState) { - var type = doc.Prefix; - var directory = Path.Combine(basefilepath, type); - var path = Path.Combine(directory, doc.FileName); - - try { - if (!Directory.Exists(directory)) - Directory.CreateDirectory(directory); - using (var targetStream = System.IO.File.Create(path)) - await doc.Save(targetStream, ModelState); - } catch (Exception ex) { - ModelState.AddModelError("Error", "Speichern der Datei fehlgeschlagen: " + ex.Message); - return; - } - - var info = _fileProvider.GetFileInfo(Path.Combine(doc.Prefix, doc.FileName)); - if (info == null) { - ModelState.AddModelError("Error", "Auf die neu erstellte Datei konnte nicht zugegriffen werden."); - return; - } - - doc.File = info; - - if (_Files == null) _Files = new Dictionary(); - if (!_Files.ContainsKey(doc.Prefix)) _Files.Add(doc.Prefix, new FileList(doc.XMLRoot)); - _Files[doc.Prefix]!.Add(doc); - } - - public async Task SaveHamannFile(XElement element, string basefilepath, ModelStateDictionary ModelState) { - var date = DateTime.Now; - var filename = "hamann_" + date.Year + "-" + date.Month + "-" + date.Day + "." 
+ Path.GetRandomFileName() + ".xml"; - var directory = Path.Combine(basefilepath, "hamann"); - var path = Path.Combine(directory, filename); - - try { - if (!Directory.Exists(directory)) - Directory.CreateDirectory(directory); - using (var targetStream = System.IO.File.Create(path)) - await element.SaveAsync(targetStream, SaveOptions.DisableFormatting, new CancellationToken()); - } catch (Exception ex) { - ModelState.AddModelError("Error", "Die Datei konnte nicht gespeichert werden: " + ex.Message); - return null; - } - - var info = _fileProvider.GetFileInfo(Path.Combine("hamann", filename)); - if (info == null) { - ModelState.AddModelError("Error", "Auf die neu erstellte Dtaei konnte nicht zugegriffen werden."); - return null; - } - - if (_HamannFiles == null) _HamannFiles = new List(); - _HamannFiles.RemoveAll(x => x.Name == info.Name); - _HamannFiles.Add(info); - return info; - } - - private Dictionary? _ScanFiles() { - if (_Roots == null) return null; - Dictionary? res = null; - var dirs = _fileProvider.GetDirectoryContents(string.Empty).Where(x => x.IsDirectory); - foreach (var dir in dirs) { - if (_Roots.ContainsKey(dir.Name)) { - if (_Files == null) _Files = new Dictionary(); - if (res == null) res = new Dictionary(); - res.Add(dir.Name, _ScanFiles(dir.Name)); - } - } - return res; - } - - private FileList? _ScanFiles(string prefix) { - if (_Roots == null) return null; - FileList? res = null; - var files = _fileProvider.GetDirectoryContents(prefix).Where(x => !x.IsDirectory && x.Name.StartsWith(prefix) && x.Name.EndsWith(".xml")); - foreach (var file in files) { - if (_Roots == null || !_Roots.ContainsKey(prefix)) - throw new Exception("Attempting to read a File from an unrecognized Prefix: " + prefix); - if (res == null) res = new FileList(_Roots[prefix]); - res.Add(new XMLRootDocument(_Roots[prefix], file)); - } - return res; - } - - private List? 
_ScanHamannFiles() { - var dir = _fileProvider.GetDirectoryContents(string.Empty).Where(x => x.IsDirectory && x.Name == "hamann"); - if (dir == null || !dir.Any()) return null; - var files = _fileProvider.GetDirectoryContents(dir.First().Name).Where(x => !x.IsDirectory && x.Name.StartsWith("hamann") && x.Name.EndsWith(".xml")); - if (files == null || !files.Any()) return null; - return files.ToList(); - } -} \ No newline at end of file diff --git a/HaWeb/Models/FileList.cs b/HaWeb/Models/FileList.cs index 0b9e240..8f33135 100644 --- a/HaWeb/Models/FileList.cs +++ b/HaWeb/Models/FileList.cs @@ -17,8 +17,6 @@ public class FileList { throw new Exception("Diese Liste kann nur Elemente des Typs " + XMLRoot.Prefix + " enthalten"); if (_Files == null) _Files = new HashSet(); - var replacing = _Files.Where(x => x.FileName == document.FileName); - if (replacing != null && replacing.Any()) _Files.Remove(replacing.First()); _Files.Add(document); } diff --git a/HaWeb/Models/FileModel.cs b/HaWeb/Models/FileModel.cs index 9c63f03..c5a00f4 100644 --- a/HaWeb/Models/FileModel.cs +++ b/HaWeb/Models/FileModel.cs @@ -1,19 +1,46 @@ +using System.Text; +using Microsoft.Extensions.FileProviders; + namespace HaWeb.Models; public class FileModel { public string FileName { get; private set; } - public string Prefix { get; private set; } - public DateTime LastModified { get; private set; } - public bool IsUsed { get; private set; } - public bool InProduction { get; private set; } - public List<(string, string?)>? Fields { get; set; } - public string? Messages { get; set; } + public IFileInfo File { get; private set; } - public FileModel(string name, string prefix, DateTime lastModified, bool isUsed, bool inProduction) { + // This affects only repo files + public bool IsValid { get; private set; } = false; + public List? Content { get; set; } + public List<(string, string?)>? Fields { get; set; } + public string? Prefix { get; set; } + + private StringBuilder? 
_log; + + public FileModel(string name, IFileInfo file) { FileName = name; - IsUsed = isUsed; - LastModified = lastModified; - InProduction = inProduction; - Prefix = prefix; + File = file; + } + + public string? GetLog() { + if (_log == null) return null; + return _log.ToString(); + } + + public void Log(string msg) { + if (_log == null) _log = new StringBuilder(); + var prefix = DateTime.Now.ToShortTimeString() + " "; + if (File != null) prefix += File.Name + ": "; + _log.AppendLine(prefix + msg); + } + + public void ResetLog() { + if (_log != null) _log.Clear(); + } + + public void Validate() { + IsValid = true; + } + + public DateTime GetLastModified() { + return File.LastModified.ToLocalTime().DateTime; } } \ No newline at end of file diff --git a/HaWeb/Models/SearchResult.cs b/HaWeb/Models/SearchResult.cs index d4032ea..8d48c51 100644 --- a/HaWeb/Models/SearchResult.cs +++ b/HaWeb/Models/SearchResult.cs @@ -7,7 +7,7 @@ using System.Collections.Generic; public class SearchResult { public string Search { get; private set; } public string Index { get; private set; } - public string Identifier { get; set; } + public string? Identifier { get; set; } public string? Page { get; set; } public string? Line { get; set; } public string? Preview { get; set; } diff --git a/HaWeb/Models/UploadViewModel.cs b/HaWeb/Models/UploadViewModel.cs deleted file mode 100644 index e7a66e1..0000000 --- a/HaWeb/Models/UploadViewModel.cs +++ /dev/null @@ -1,27 +0,0 @@ -namespace HaWeb.Models; -using HaWeb.XMLParser; -using Microsoft.Extensions.FileProviders; -using Microsoft.AspNetCore.Mvc.Rendering; - -public class UploadViewModel { - public string ActiveTitle { get; private set; } - public string? Prefix { get; private set; } - public List? AvailableRoots { get; private set; } - public List? AvailableFiles { get; set; } - public Dictionary?>? UsedFiles { get; private set; } - public Dictionary?>? 
ProductionFiles { get; set; } - public List AvailableYears { get; private set; } - public int StartYear { get; private set; } - public int EndYear { get; private set; } - public List? HamannFiles { get; set; } - - public UploadViewModel(string title, string? prefix, List? roots, Dictionary?>? usedFiles, int startYear, int endYear, List availableYears) { - Prefix = prefix; - ActiveTitle = title; - AvailableRoots = roots; - UsedFiles = usedFiles; - StartYear = startYear; - EndYear = endYear; - AvailableYears = availableYears; - } -} \ No newline at end of file diff --git a/HaWeb/Models/XMLRootDocument.cs b/HaWeb/Models/XMLRootDocument.cs index c647641..845dbfd 100644 --- a/HaWeb/Models/XMLRootDocument.cs +++ b/HaWeb/Models/XMLRootDocument.cs @@ -7,154 +7,26 @@ using HaWeb.XMLParser; using System.Text; public class XMLRootDocument { - private XElement? _Element; - private string? _filename; - private IFileInfo? _file; - private StringBuilder? _log; - + [JsonIgnore] + public XElement? Element { get; private set; } [JsonIgnore] public IXMLRoot XMLRoot { get; private set; } + public FileModel File { get; private set; } - public string FileName { - get { - if (_filename == null) - _filename = _CreateFilename(); - return _filename; - } - } - - [JsonIgnore] - public IFileInfo? File { - get { - return _file; - } - set { - _file = value; - _Element = null; - } } public string Prefix { get; private set; } - public DateTime Date { get; private set; } - + // UNUSED AS OF NOW public (string?, string?) IdentificationString { get; private set; } + // TODO: Fields public List<(string, string?)>? 
Fields { get; set; } - // Entry point for file reading - public XMLRootDocument(IXMLRoot xmlRoot, IFileInfo file) { - XMLRoot = xmlRoot; - Prefix = xmlRoot.Prefix; - File = file; - Date = file.LastModified.LocalDateTime; - _filename = file.Name; - _GenerateFieldsFromFilename(file.Name); - } - - // Entry point for XML upload reading - public XMLRootDocument(IXMLRoot xmlRoot, string prefix, (string?, string?) idString, XElement element) { + // Entry point for XML file reading + public XMLRootDocument(IXMLRoot xmlRoot, string prefix, (string?, string?) idString, XElement element, FileModel file) { XMLRoot = xmlRoot; Prefix = prefix; IdentificationString = idString; - Date = DateTime.Now; - _Element = element; + File = file; + Element = element; } - private string _CreateFilename() { - var filename = _removeInvalidChars(Prefix) + "_"; - if (!String.IsNullOrWhiteSpace(IdentificationString.Item1)) { - var hash = IdentificationString.Item1.GetHashCode().ToString("X8"); - filename += hash + "_"; - } - if (!String.IsNullOrWhiteSpace(IdentificationString.Item2)) filename += _removeInvalidChars(IdentificationString.Item2) + "_"; - filename += _removeInvalidChars(Date.Year.ToString() + "-" + Date.Month.ToString() + "-" + Date.Day.ToString()) + "." + Path.GetRandomFileName(); - return filename + ".xml"; - } - - private string _removeInvalidChars(string? s) { - if (String.IsNullOrWhiteSpace(s)) return string.Empty; - foreach (var c in Path.GetInvalidFileNameChars()) { - s = s.Replace(c, '-'); - } - s = s.Replace('_', '-'); - return s; - } - - private void _GenerateFieldsFromFilename(string filename) { - var split = filename.Split('_'); - Prefix = split[0]; - if (split.Length == 3) { - IdentificationString = (null, split[1]); - } else if (split.Length == 4) { - IdentificationString = (split[1], split[2]); - } else { - IdentificationString = (null, null); - } - } - - public string? 
GetLog() { - if (_log == null) return null; - return _log.ToString(); - } - - public void Log(string msg) { - if (_log == null) _log = new StringBuilder(); - var prefix = DateTime.Now.ToShortTimeString() + " "; - if (File != null) prefix += File.Name + ": "; - _log.AppendLine(prefix + msg); - } - - public void ResetLog() { - if (_log != null) _log.Clear(); - } - - // Call on UnUse to prevent memory hogging - public void UnUse() { - _Element = null; - _log = null; - } - - public XElement GetElement() { - if (_Element == null) - _Element = _GetElement(); - return _Element; - } - - private XElement _GetElement() { - if (File == null || String.IsNullOrWhiteSpace(File.PhysicalPath) || !File.Exists) - throw new Exception("Es ist kein Pfad für die XML-Datei vorhanden."); - - if (XMLRoot == null) - throw new Exception("Kein gültiges Hamann-Dokument: " + File.PhysicalPath + "Vom Prefix: " + Prefix); - - XDocument? doc = null; - try { - doc = XDocument.Load(File.PhysicalPath, LoadOptions.PreserveWhitespace | LoadOptions.SetLineInfo); - } catch (Exception ex) { - throw new Exception("Fehler beim Lesen des Dokuments: " + ex.Message); - } - - if (doc == null || doc.Root == null) - throw new Exception("Das Dokument ist ungültig und kann nicht gelesen werden: " + File.PhysicalPath); - - var element = XMLRoot.IsTypeOf(doc.Root); - if (element == null || !element.Any()) - throw new Exception("Kein gültiges Hamann-Dokument: " + File.PhysicalPath + "Vom Prefix: " + Prefix); - - return element.First(); - } - - public async Task Save(Stream stream, ModelStateDictionary state) { - if (XMLRoot == null) { - state.AddModelError("Error", "No corresponding Root Element found."); - return; - } - - if (_Element == null) { - if (File == null) { - state.AddModelError("Error", "There is neither a file nor a saved element for this Document aborting the save."); - return; - } - _Element = GetElement(); - } - - await XMLRoot.CreateHamannDocument(_Element).SaveAsync(stream, 
SaveOptions.DisableFormatting, new CancellationToken()); - } + } \ No newline at end of file diff --git a/HaWeb/Models/XMLStateViewModel.cs b/HaWeb/Models/XMLStateViewModel.cs new file mode 100644 index 0000000..50759b2 --- /dev/null +++ b/HaWeb/Models/XMLStateViewModel.cs @@ -0,0 +1,36 @@ +namespace HaWeb.Models; +using HaWeb.XMLParser; +using Microsoft.Extensions.FileProviders; +using Microsoft.AspNetCore.Mvc.Rendering; + +public class XMLStateViewModel { + // Titel der Seite / Aktiver Präfix + public string ActiveTitle { get; private set; } + public IFileInfo? ActiveFile { get; set; } + public (DateTime PullTime, string Hash)? GitData { get; private set; } + public bool ValidState { get; private set; } + + // Verfügbare Datei-Typen + public List? AvailableRoots { get; private set; } + + // Aktuell geladene Dateien + public List? ManagedFiles { get; private set; } + + // Verfügbare (Gesamt-)Dateien + public List? HamannFiles { get; set; } + + public XMLStateViewModel( + string title, + (DateTime PullTime, string Hash)? gitData, + List? roots, + List? hamannFiles, + List? 
managedFiles, + bool validState) { + ActiveTitle = title; + AvailableRoots = roots; + HamannFiles = hamannFiles; + ManagedFiles = managedFiles; + GitData = gitData; + ValidState = validState; + } +} \ No newline at end of file diff --git a/HaWeb/Program.cs b/HaWeb/Program.cs index e53dc77..077cb97 100644 --- a/HaWeb/Program.cs +++ b/HaWeb/Program.cs @@ -4,42 +4,64 @@ using HaDocument.Interfaces; using HaWeb.XMLParser; using HaWeb.XMLTests; using HaWeb.FileHelpers; +using HaWeb.BackgroundTask; using Microsoft.FeatureManagement; using System.Runtime.InteropServices; using Microsoft.Extensions.FileProviders; +using Microsoft.AspNetCore.HttpOverrides; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Primitives; var builder = WebApplication.CreateBuilder(args); +List configpaths = new List(); + +// Add additional configuration +if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { + var p = builder.Configuration.GetValue("WorkingTreePathWindows") + "settings.json"; + configpaths.Add(p); + builder.Configuration.AddJsonFile(p, optional: true, reloadOnChange: true); +} else { + var p = builder.Configuration.GetValue("WorkingTreePathLinux") + "settings.json"; + configpaths.Add(p); + builder.Configuration.AddJsonFile(p, optional: true, reloadOnChange: true); +} + + + +// Create initial Data +var tS = new XMLTestService(); +var XMLIS = new XMLInteractionService(builder.Configuration, tS); +var hdW = new HaDocumentWrapper(XMLIS, builder.Configuration); +var XMLFP = new XMLFileProvider(XMLIS, hdW, builder.Configuration); // Add services to the container. builder.Services.AddControllersWithViews(); builder.Services.AddHttpContextAccessor(); - -// // To get files from a path provided by configuration: -// TODO: Test Read / Write Access -string? 
filepath = null; -if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { - filepath = builder.Configuration.GetValue("StoredFilePathWindows"); -} -else { - filepath = builder.Configuration.GetValue("StoredFilePathLinux"); -} - -if (filepath == null) { - throw new Exception("You need to set a specific Filepath, either StoredFilePathWindows or StoredFilePathLinux"); -} - -var physicalProvider = new PhysicalFileProvider(filepath); - -builder.Services.AddSingleton(physicalProvider); builder.Services.AddTransient(); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); +builder.Services.AddSingleton((_) => tS); +builder.Services.AddSingleton((_) => XMLIS); +builder.Services.AddSingleton((_) => hdW); +builder.Services.AddSingleton(_ => XMLFP); +// builder.Services.AddSingleton(); +// builder.Services.AddHostedService(); +// builder.Services.AddSingleton(ctx => +// { +// if (!int.TryParse(builder.Configuration["QueueCapacity"], out var queueCapacity)) +// queueCapacity = 100; +// return new BackgroundTaskQueue(queueCapacity); +// }); +// builder.Services.AddSingleton(); builder.Services.AddFeatureManagement(); - var app = builder.Build(); +// Reload config on change +var cM = new ConfigurationMonitor(configpaths.ToArray(), app.Services); +ChangeToken.OnChange( + () => app.Configuration.GetReloadToken(), + (state) => cM.InvokeChanged(state), + configpaths.ToArray() +); + // Configure the HTTP request pipeline. if (!app.Environment.IsDevelopment()) { @@ -47,6 +69,7 @@ if (!app.Environment.IsDevelopment()) // The default HSTS value is 30 days. You may want to change this for production scenarios, see https://aka.ms/aspnetcore-hsts. 
app.UseHsts(); app.UseHttpsRedirection(); + app.UseForwardedHeaders(new ForwardedHeadersOptions{ForwardedHeaders = ForwardedHeaders.XForwardedFor | ForwardedHeaders.XForwardedProto}); } app.UseAuthorization(); diff --git a/HaWeb/SearchHelpers/SearchRules.cs b/HaWeb/SearchHelpers/SearchRules.cs index f3379ee..9c1ab65 100644 --- a/HaWeb/SearchHelpers/SearchRules.cs +++ b/HaWeb/SearchHelpers/SearchRules.cs @@ -35,7 +35,7 @@ public class SearchRules { if (sb.Length >= sw.Length) { if (sb.ToString().Contains(sw)) { if (reader.State.Results == null) - reader.State.Results = new List<(string Page, string Line, string Identifier)>(); + reader.State.Results = new List<(string Page, string Line, string? Identifier)>(); reader.State.Results.Add((reader.CurrentPage, reader.CurrentLine, reader.State.CurrentIdentifier)); } sb.Remove(0, sb.Length - sw.Length); diff --git a/HaWeb/SearchHelpers/SearchState.cs b/HaWeb/SearchHelpers/SearchState.cs index 3936c05..e2fc0eb 100644 --- a/HaWeb/SearchHelpers/SearchState.cs +++ b/HaWeb/SearchHelpers/SearchState.cs @@ -7,7 +7,7 @@ public class SearchState : HaWeb.HTMLParser.IState { internal string? CurrentIdentifier; internal ILibrary? Lib; internal bool Normalize; - internal List<(string Page, string Line, string Identifier)>? Results; + internal List<(string Page, string Line, string? Identifier)>? Results; public SearchState(string searchword, bool normalize = false, ILibrary? lib = null) { Lib = lib; diff --git a/HaWeb/Settings/NodeRules/AppNode.cs b/HaWeb/Settings/NodeRules/AppNode.cs index e96f640..9501550 100644 --- a/HaWeb/Settings/NodeRules/AppNode.cs +++ b/HaWeb/Settings/NodeRules/AppNode.cs @@ -6,11 +6,14 @@ using HaWeb.XMLTests; public class AppNode : INodeRule { public string Name => "app"; - public string XPath => "//app"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "ueberlieferung" }, + XPath = "//app" + }; public string[]? Attributes { get; } = { "ref" }; public string? 
uniquenessAttribute => null; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>() { - ("ref", "//appDef", "index") + ("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//appDef" }, "index") }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/AutopsicNode.cs b/HaWeb/Settings/NodeRules/AutopsicNode.cs index f39101a..5ae2b36 100644 --- a/HaWeb/Settings/NodeRules/AutopsicNode.cs +++ b/HaWeb/Settings/NodeRules/AutopsicNode.cs @@ -6,10 +6,13 @@ using HaWeb.XMLTests; public class AutopsicNode : INodeRule { public string Name => "autopsic"; - public string XPath => "//autopsic"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "metadaten" }, + XPath = "//autopsic" + }; public string[]? Attributes { get; } = { "value" }; public string? uniquenessAttribute => "value" ; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>() { }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/EditNode.cs b/HaWeb/Settings/NodeRules/EditNode.cs index 3d5ae0e..bf9377d 100644 --- a/HaWeb/Settings/NodeRules/EditNode.cs +++ b/HaWeb/Settings/NodeRules/EditNode.cs @@ -6,11 +6,14 @@ using HaWeb.XMLTests; public class EditNode : INodeRule { public string Name => "edit"; - public string XPath => "//edit"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "brieftext", "texteingriffe", "ueberlieferung" }, + XPath = "//edit" + }; public string[]? Attributes { get; } = { "ref" }; public string? uniquenessAttribute => null; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? 
References { get; } = new List<(string, HamannXPath, string)>() { - ("ref", "//editreason", "index") + ("ref", new HamannXPath() { Documents = new[] { "texteingriffe" }, XPath = "//editreason" }, "index") }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/HandNode.cs b/HaWeb/Settings/NodeRules/HandNode.cs index 3954ab7..3cc2608 100644 --- a/HaWeb/Settings/NodeRules/HandNode.cs +++ b/HaWeb/Settings/NodeRules/HandNode.cs @@ -6,11 +6,14 @@ using HaWeb.XMLTests; public class HandNode : INodeRule { public string Name => "hand"; - public string XPath => "//hand"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "ueberlieferung", "brieftext", "texteingriffe" }, + XPath = "//hand" + }; public string[]? Attributes { get; } = { "ref" }; public string? uniquenessAttribute => null; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>() { - ("ref", "//handDef", "index") + ("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//handDef" }, "index") }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/KommentarNode.cs b/HaWeb/Settings/NodeRules/KommentarNode.cs index 492f13e..9676b11 100644 --- a/HaWeb/Settings/NodeRules/KommentarNode.cs +++ b/HaWeb/Settings/NodeRules/KommentarNode.cs @@ -6,10 +6,13 @@ using HaWeb.XMLTests; public class KommentarNode : INodeRule { public string Name => "kommentar"; - public string XPath => "//kommentar"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "register" }, + XPath = "//kommentar" + }; public string[]? Attributes { get; } = { "id" }; public string? uniquenessAttribute => "id" ; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? 
References { get; } = new List<(string, HamannXPath, string)>() { }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/LetterDescNode.cs b/HaWeb/Settings/NodeRules/LetterDescNode.cs index 7cd2652..515c757 100644 --- a/HaWeb/Settings/NodeRules/LetterDescNode.cs +++ b/HaWeb/Settings/NodeRules/LetterDescNode.cs @@ -6,10 +6,13 @@ using HaWeb.XMLTests; public class LetterDescNode : INodeRule { public string Name => "letterDesc"; - public string XPath => "//letterDesc"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "metadaten" }, + XPath = "//letterDesc" + }; public string[]? Attributes { get; } = { "ref" }; public string? uniquenessAttribute => "ref" ; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>() { }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/LetterTextNode.cs b/HaWeb/Settings/NodeRules/LetterTextNode.cs index 86e0a38..0014d6a 100644 --- a/HaWeb/Settings/NodeRules/LetterTextNode.cs +++ b/HaWeb/Settings/NodeRules/LetterTextNode.cs @@ -6,10 +6,13 @@ using HaWeb.XMLTests; public class LetterTextNode : INodeRule { public string Name => "letterText"; - public string XPath => "//letterText"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "brieftext" }, + XPath = "//letterText" + }; public string[]? Attributes { get; } = { "index" }; public string? uniquenessAttribute => "index" ; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? 
References { get; } = new List<(string, HamannXPath, string)>() { }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/LetterTraditionNode.cs b/HaWeb/Settings/NodeRules/LetterTraditionNode.cs index 31857a6..8c4a79f 100644 --- a/HaWeb/Settings/NodeRules/LetterTraditionNode.cs +++ b/HaWeb/Settings/NodeRules/LetterTraditionNode.cs @@ -6,10 +6,13 @@ using HaWeb.XMLTests; public class LetterTraditionNode : INodeRule { public string Name => "letterTradition"; - public string XPath => "//letterTradition"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "ueberlieferung" }, + XPath = "//letterTradition" + }; public string[]? Attributes { get; } = { "ref" }; public string? uniquenessAttribute => "ref" ; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>() { }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/LinkNode.cs b/HaWeb/Settings/NodeRules/LinkNode.cs index 04b594f..431bbdc 100644 --- a/HaWeb/Settings/NodeRules/LinkNode.cs +++ b/HaWeb/Settings/NodeRules/LinkNode.cs @@ -6,12 +6,15 @@ using HaWeb.XMLTests; public class LinkNode : INodeRule { public string Name => "link"; - public string XPath => "//link"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "ueberlieferung", "stellenkommentar", "register", "texteingriffe" }, + XPath = "//link" + }; public string[]? Attributes { get; } = null; public string? uniquenessAttribute => null; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? 
References { get; } = new List<(string, HamannXPath, string)>() { - ("ref", "//kommentar", "id"), - ("subref", "//subsection", "id") + ("ref", new HamannXPath() { Documents = new[] { "register" }, XPath = "//kommentar" }, "id"), + ("subref", new HamannXPath() { Documents = new[] { "register" }, XPath = "//subsection" }, "id"), }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/MarginalNode.cs b/HaWeb/Settings/NodeRules/MarginalNode.cs index a2586b9..cbce3d8 100644 --- a/HaWeb/Settings/NodeRules/MarginalNode.cs +++ b/HaWeb/Settings/NodeRules/MarginalNode.cs @@ -6,10 +6,13 @@ using HaWeb.XMLTests; public class MarginalNode : INodeRule { public string Name => "marginal"; - public string XPath => "//marginal"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "stellenkommentar" }, + XPath = "//marginal" + }; public string[]? Attributes { get; } = { "index", "letter", "page", "line" }; public string? uniquenessAttribute => "index"; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>() { }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/ReceiverNode.cs b/HaWeb/Settings/NodeRules/ReceiverNode.cs index dc7727b..9f9ce90 100644 --- a/HaWeb/Settings/NodeRules/ReceiverNode.cs +++ b/HaWeb/Settings/NodeRules/ReceiverNode.cs @@ -6,11 +6,14 @@ using HaWeb.XMLTests; public class Receiver : INodeRule { public string Name => "receiver"; - public string XPath => "//receiver"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "metadaten" }, + XPath = "//receiver" + }; public string[]? Attributes { get; } = { "ref" }; public string? uniquenessAttribute => null; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? 
References { get; } = new List<(string, HamannXPath, string)>() { - ("ref", "//personDef", "index") + ("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//personDef" }, "index") }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/SenderNode.cs b/HaWeb/Settings/NodeRules/SenderNode.cs index 91708a1..0d8f808 100644 --- a/HaWeb/Settings/NodeRules/SenderNode.cs +++ b/HaWeb/Settings/NodeRules/SenderNode.cs @@ -6,11 +6,14 @@ using HaWeb.XMLTests; public class SenderNode : INodeRule { public string Name => "sender"; - public string XPath => "//sender"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "metadaten" }, + XPath = "//sender" + }; public string[]? Attributes { get; } = { "ref" }; public string? uniquenessAttribute => null; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? References { get; } = new List<(string, HamannXPath, string)>() { - ("ref", "//personDef", "index") + ("ref", new HamannXPath() { Documents = new[] { "personenorte" }, XPath = "//personDef" }, "index") }; } \ No newline at end of file diff --git a/HaWeb/Settings/NodeRules/StructureCollection.cs b/HaWeb/Settings/NodeRules/StructureCollection.cs index fe3ddb0..d08c343 100644 --- a/HaWeb/Settings/NodeRules/StructureCollection.cs +++ b/HaWeb/Settings/NodeRules/StructureCollection.cs @@ -5,8 +5,15 @@ namespace HaWeb.Settings.NodeRules; public class StructureCollection : ICollectionRule { public string Name { get; } = "structure"; - public string[] Bases { get; } = { "//letterText", "//letterTradition" }; - public string[] Backlinks { get; } = { "//intlink", "//marginal" }; + public HamannXPath[] Bases { get; } = { + new HamannXPath() { Documents = new[] { "brieftext" }, XPath = "//letterText" }, + new HamannXPath() { Documents = new[] { "ueberlieferung" }, XPath = "//letterTradition"} + }; + public HamannXPath[] Backlinks { get; } = { + new 
HamannXPath() { Documents = new[] { "stellenkommentar", "ueberlieferung", "texteingriffe", "register" }, XPath = "//intlink" }, + new HamannXPath() { Documents = new[] { "stellenkommentar" }, XPath = "//marginal"} + }; + public IEnumerable<(string, XElement, XMLRootDocument)> GenerateIdentificationStrings(IEnumerable<(XElement, XMLRootDocument)> list) { foreach (var e in list) { var id = e.Item1.Name == "letterText" ? e.Item1.Attribute("index")!.Value : e.Item1.Attribute("ref")!.Value; diff --git a/HaWeb/Settings/NodeRules/SubsectionNode.cs b/HaWeb/Settings/NodeRules/SubsectionNode.cs index 9be66bf..bee310b 100644 --- a/HaWeb/Settings/NodeRules/SubsectionNode.cs +++ b/HaWeb/Settings/NodeRules/SubsectionNode.cs @@ -6,10 +6,13 @@ using HaWeb.XMLTests; public class SubsectionNode : INodeRule { public string Name => "subsection"; - public string XPath => "//subsection"; + public HamannXPath XPath => new HamannXPath() { + Documents = new[] { "register" }, + XPath = "//subsection" + }; public string[]? Attributes { get; } = { "id" }; public string? uniquenessAttribute => "id" ; - public List<(string, string, string)>? References { get; } = new List<(string, string, string)>() + public List<(string, HamannXPath, string)>? 
References { get; } = new List<(string, HamannXPath, string)>() { }; } \ No newline at end of file diff --git a/HaWeb/Settings/XMLRoots/CommentRoot.cs b/HaWeb/Settings/XMLRoots/CommentRoot.cs index bb9f772..df969bb 100644 --- a/HaWeb/Settings/XMLRoots/CommentRoot.cs +++ b/HaWeb/Settings/XMLRoots/CommentRoot.cs @@ -6,7 +6,7 @@ using HaWeb.XMLParser; public class CommentRoot : HaWeb.XMLParser.IXMLRoot { public string Type { get; } = "Register"; public string Prefix { get; } = "register"; - public string[] XPathContainer { get; } = { ".//data//kommentare/kommcat", ".//kommentare/kommcat" }; + public string[] XPathContainer { get; } = { "/opus/data//kommentare/kommcat", "/opus//kommentare/kommcat" }; public Predicate IsCollectedObject { get; } = (elem) => { if (elem.Name == "kommentar") return true; @@ -39,7 +39,7 @@ public class CommentRoot : HaWeb.XMLParser.IXMLRoot { public void MergeIntoFile(XElement file, XMLRootDocument document) { if (file.Element("kommentare") == null) file.AddFirst(new XElement("kommentare")); - file.Element("kommentare")!.AddFirst(document.GetElement()); + file.Element("kommentare")!.AddFirst(document.Element); } } \ No newline at end of file diff --git a/HaWeb/Settings/XMLRoots/DescriptionsRoot.cs b/HaWeb/Settings/XMLRoots/DescriptionsRoot.cs index c2d180d..1800a80 100644 --- a/HaWeb/Settings/XMLRoots/DescriptionsRoot.cs +++ b/HaWeb/Settings/XMLRoots/DescriptionsRoot.cs @@ -6,7 +6,7 @@ using HaWeb.XMLParser; public class DescriptionsRoot : HaWeb.XMLParser.IXMLRoot { public string Type { get; } = "Metadaten"; public string Prefix { get; } = "metadaten"; - public string[] XPathContainer { get; } = { ".//data/descriptions", ".//descriptions" }; + public string[] XPathContainer { get; } = { "/opus/data/descriptions", "/opus/descriptions" }; public Predicate IsCollectedObject { get; } = (elem) => { if (elem.Name == "letterDesc") return true; @@ -34,7 +34,7 @@ public class DescriptionsRoot : HaWeb.XMLParser.IXMLRoot { public void 
MergeIntoFile(XElement file, XMLRootDocument document) { if (file.Element("descriptions") == null) file.AddFirst(new XElement("descriptions")); - var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x)); + var elements = document.Element.Elements().Where(x => IsCollectedObject(x)); var root = file.Element("descriptions"); foreach (var element in elements) { root!.Add(element); diff --git a/HaWeb/Settings/XMLRoots/DocumentRoot.cs b/HaWeb/Settings/XMLRoots/DocumentRoot.cs index 134863c..9cfed19 100644 --- a/HaWeb/Settings/XMLRoots/DocumentRoot.cs +++ b/HaWeb/Settings/XMLRoots/DocumentRoot.cs @@ -7,7 +7,7 @@ using System.IO; public class DocumentRoot : HaWeb.XMLParser.IXMLRoot { public string Type { get; } = "Brieftext"; public string Prefix { get; } = "brieftext"; - public string[] XPathContainer { get; } = { ".//data/document", ".//document" }; + public string[] XPathContainer { get; } = { "/opus/data/document", "/opus/document" }; public Predicate IsCollectedObject { get; } = (elem) => { if (elem.Name == "letterText") return true; @@ -35,7 +35,7 @@ public class DocumentRoot : HaWeb.XMLParser.IXMLRoot { public void MergeIntoFile(XElement file, XMLRootDocument document) { if (file.Element("document") == null) file.AddFirst(new XElement("document")); - var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x)); + var elements = document.Element.Elements().Where(x => IsCollectedObject(x)); var root = file.Element("document"); foreach (var element in elements) { root!.Add(element); diff --git a/HaWeb/Settings/XMLRoots/EditsRoot.cs b/HaWeb/Settings/XMLRoots/EditsRoot.cs index 0c867af..c8f5c28 100644 --- a/HaWeb/Settings/XMLRoots/EditsRoot.cs +++ b/HaWeb/Settings/XMLRoots/EditsRoot.cs @@ -6,7 +6,7 @@ using HaWeb.XMLParser; public class EditsRoot : HaWeb.XMLParser.IXMLRoot { public string Type { get; } = "Texteingriffe"; public string Prefix { get; } = "texteingriffe"; - public string[] XPathContainer { get; } = { 
".//data/edits", ".//edits" }; + public string[] XPathContainer { get; } = { "/opus/data/edits", "/opus/edits" }; public Predicate IsCollectedObject { get; } = (elem) => { if (elem.Name == "editreason") return true; @@ -34,7 +34,7 @@ public class EditsRoot : HaWeb.XMLParser.IXMLRoot { public void MergeIntoFile(XElement file, XMLRootDocument document) { if (file.Element("edits") == null) file.AddFirst(new XElement("edits")); - var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x)); + var elements = document.Element.Elements().Where(x => IsCollectedObject(x)); var root = file.Element("edits"); foreach (var element in elements) { root!.Add(element); diff --git a/HaWeb/Settings/XMLRoots/MarginalsRoot.cs b/HaWeb/Settings/XMLRoots/MarginalsRoot.cs index b9442a3..e162459 100644 --- a/HaWeb/Settings/XMLRoots/MarginalsRoot.cs +++ b/HaWeb/Settings/XMLRoots/MarginalsRoot.cs @@ -6,7 +6,7 @@ using HaWeb.XMLParser; public class MarginalsRoot : HaWeb.XMLParser.IXMLRoot { public string Type { get; } = "Stellenkommentar"; public string Prefix { get; } = "stellenkommentar"; - public string[] XPathContainer { get; } = { ".//data/marginalien", ".//marginalien" }; + public string[] XPathContainer { get; } = { "/opus/data/marginalien", "/opus/marginalien" }; public Predicate IsCollectedObject { get; } = (elem) => { if (elem.Name == "marginal") return true; @@ -34,7 +34,7 @@ public class MarginalsRoot : HaWeb.XMLParser.IXMLRoot { public void MergeIntoFile(XElement file, XMLRootDocument document) { if (file.Element("marginalien") == null) file.AddFirst(new XElement("marginalien")); - var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x)); + var elements = document.Element.Elements().Where(x => IsCollectedObject(x)); var root = file.Element("marginalien"); foreach (var element in elements) { root!.Add(element); diff --git a/HaWeb/Settings/XMLRoots/ReferencesRoot.cs b/HaWeb/Settings/XMLRoots/ReferencesRoot.cs index 8507b6d..94bedc2 100644 
--- a/HaWeb/Settings/XMLRoots/ReferencesRoot.cs +++ b/HaWeb/Settings/XMLRoots/ReferencesRoot.cs @@ -6,7 +6,7 @@ using HaWeb.XMLParser; public class ReferencesRoot : HaWeb.XMLParser.IXMLRoot { public string Type { get; } = "Personen / Orte"; public string Prefix { get; } = "personenorte"; - public string[] XPathContainer { get; } = { ".//data/definitions", ".//definitions" }; + public string[] XPathContainer { get; } = { "/opus/data/definitions", "/opus/definitions" }; public Predicate IsCollectedObject { get; } = (elem) => { if (elem.Name == "personDefs" || elem.Name == "structureDefs" || elem.Name == "handDefs" || elem.Name == "locationDefs" || elem.Name == "appDefs") @@ -35,7 +35,7 @@ public class ReferencesRoot : HaWeb.XMLParser.IXMLRoot { public void MergeIntoFile(XElement file, XMLRootDocument document) { if (file.Element("definitions") == null) file.AddFirst(new XElement("definitions")); - var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x)); + var elements = document.Element.Elements().Where(x => IsCollectedObject(x)); var root = file.Element("definitions"); foreach (var element in elements) { root!.Add(element); diff --git a/HaWeb/Settings/XMLRoots/TraditionsRoot.cs b/HaWeb/Settings/XMLRoots/TraditionsRoot.cs index 927df7e..bbf9249 100644 --- a/HaWeb/Settings/XMLRoots/TraditionsRoot.cs +++ b/HaWeb/Settings/XMLRoots/TraditionsRoot.cs @@ -6,7 +6,7 @@ using HaWeb.XMLParser; public class TraditionsRoot : HaWeb.XMLParser.IXMLRoot { public string Type { get; } = "Überlieferung"; public string Prefix { get; } = "ueberlieferung"; - public string[] XPathContainer { get; } = { ".//data/traditions", ".//traditions" }; + public string[] XPathContainer { get; } = { "/opus/data/traditions", "/opus/traditions" }; public Predicate IsCollectedObject { get; } = (elem) => { if (elem.Name == "letterTradition") return true; @@ -34,7 +34,7 @@ public class TraditionsRoot : HaWeb.XMLParser.IXMLRoot { public void MergeIntoFile(XElement file, 
XMLRootDocument document) { if (file.Element("traditions") == null) file.AddFirst(new XElement("traditions")); - var elements = document.GetElement().Elements().Where(x => IsCollectedObject(x)); + var elements = document.Element.Elements().Where(x => IsCollectedObject(x)); var root = file.Element("traditions"); foreach (var element in elements) { root!.Add(element); diff --git a/HaWeb/Views/Admin/Dynamic/Upload.cshtml b/HaWeb/Views/Admin/Dynamic/Upload.cshtml deleted file mode 100644 index 517d2ab..0000000 --- a/HaWeb/Views/Admin/Dynamic/Upload.cshtml +++ /dev/null @@ -1,104 +0,0 @@ -@model UploadViewModel; -@{ - ViewData["Title"] = "Upload & Veröffentlichen"; - ViewData["SEODescription"] = "Johann Georg Hamann: Kommentierte Briefausgabe, Hg. v. Leonard Keidel und Janina Reibold. Durchsuchbare Online-Ausgabe der Briefe von und an Johann Georg Hamann."; - ViewData["showCredits"] = "false"; -} - - - - - -
-

@Model.ActiveTitle

-
- -
- @* File Category Page File List *@ - @if (Model.AvailableFiles != null && Model.AvailableFiles.Any()) { -
-
-
Datei(en)
- @if(Model.UsedFiles != null && Model.UsedFiles.ContainsKey(Model.Prefix)) { -
- @foreach (var item in Model.UsedFiles[Model.Prefix]!) - { - if(item == Model.UsedFiles[Model.Prefix]!.Last()) { - @item.FileName - } - else { - @item.FileName, - } - } -
- } -
- -
- - @if (Model.UsedFiles != null && Model.Prefix != null && Model.UsedFiles.ContainsKey(Model.Prefix)) { - - } - } - - - @* Start Page File List *@ - else { -
- @await Html.PartialAsync("/Views/Shared/_PublishForm.cshtml", Model) -
- -
- @await Html.PartialAsync("/Views/Shared/_FileListForm.cshtml", (Model.HamannFiles, "Verfügbare Hamann-Dateien", "API", "SetInProduction", string.Empty, "/Download/XML/", false)) -
- -
- Verfügbare Jahre: bis  - - -
- } - -
\ No newline at end of file diff --git a/HaWeb/Views/Admin/Dynamic/XMLState.cshtml b/HaWeb/Views/Admin/Dynamic/XMLState.cshtml new file mode 100644 index 0000000..95036e5 --- /dev/null +++ b/HaWeb/Views/Admin/Dynamic/XMLState.cshtml @@ -0,0 +1,83 @@ +@model XMLStateViewModel; +@{ + ViewData["Title"] = "Syntax-Check & Dateien"; + ViewData["SEODescription"] = "Johann Georg Hamann: Kommentierte Briefausgabe, Hg. v. Leonard Keidel und Janina Reibold. Durchsuchbare Online-Ausgabe der Briefe von und an Johann Georg Hamann."; + ViewData["showCredits"] = "false"; +} + +
+
+ + @* File Category Page File List *@ + @* + *@ + @if (Model.ManagedFiles != null && Model.ManagedFiles.Any()) { +
+ + @foreach (var f in Model.ManagedFiles) { + + + + @if (f.IsValid) { + + } else { + + } + + } +
@f.FileName@f.GetLastModified()Valid! @f.GetLog()@f.GetLog()
+
+ } else { +
+ Keine Dateien im Repository gefunden! +
+ } + + + + +
+ Verfügbare Dateien + @if(Model.HamannFiles != null && Model.HamannFiles.Any()) { +
+
+ @foreach (var file in Model.HamannFiles) { +
+ @if (Model.ActiveFile != null) { + + } else { + + } +
@file.Name
+ @if (Model.ActiveFile != null && file.Name == Model.ActiveFile!.Name) { +
+
in Verwendung
+
+ } + @* // TODO Metadata + // TODO DELETE *@ +
@file.LastModified.LocalDateTime
+
+ } +
+ @if (!Model.ValidState) { +
+ Status nicht validiert! Daten können nicht auf der Webseite angezeigt werden! +
+ } + + +
+ } + else { +
Keine Dateien gefunden! Es wird eine fallback-Datei verwendet!
+ } +
\ No newline at end of file diff --git a/HaWeb/Views/Shared/_FileListForm.cshtml b/HaWeb/Views/Shared/_FileListForm.cshtml deleted file mode 100644 index 88f8000..0000000 --- a/HaWeb/Views/Shared/_FileListForm.cshtml +++ /dev/null @@ -1,53 +0,0 @@ -@model (List? files, string title, string aspcontrolller, string aspaction, string id, string downloadprefix, bool multipleallowed); - - - - -
- @Model.title - @if(Model.files != null && Model.files.Any()) { -
-
- @foreach (var file in Model.files.OrderByDescending(x => x.LastModified)) { -
- @if (Model.multipleallowed) { - - } - else { - - } -
@file.FileName
- @if (file.InProduction || file.IsUsed) { -
- @if (file.InProduction) { -
in Verwendung
- } - @if (file.IsUsed) { -
geladen
- } -
- } - @if (file.Fields != null && file.Fields.Any()) { -
- @foreach (var field in file.Fields) { - @if (field.Item2 != null) { -
field.Item2
- } - } -
- } -
@file.LastModified - @file.Prefix
-
- } -
- - -
- } - else { -
Keine Dateien gefunden! Es wird eine fallback-Datei verwendet!
- } -
\ No newline at end of file diff --git a/HaWeb/Views/Shared/_PublishForm.cshtml b/HaWeb/Views/Shared/_PublishForm.cshtml index a52bae5..278a552 100644 --- a/HaWeb/Views/Shared/_PublishForm.cshtml +++ b/HaWeb/Views/Shared/_PublishForm.cshtml @@ -1,10 +1,10 @@ -@model UploadViewModel; +@model XMLStateViewModel; -@if (Model.UsedFiles != null && Model.UsedFiles.Any()) { +@* @if (Model. != null && Model..Any()) {
Aktuell geladene Dateien
- @foreach (var (category, files) in Model.UsedFiles.OrderBy(x => x.Key)) + @foreach (var (category, files) in Model..OrderBy(x => x.Key)) { @@ -32,4 +32,4 @@ -} \ No newline at end of file +} *@ \ No newline at end of file diff --git a/HaWeb/Views/Shared/_UploadForm.cshtml b/HaWeb/Views/Shared/_UploadForm.cshtml index 0d7bd0b..0b1be9f 100644 --- a/HaWeb/Views/Shared/_UploadForm.cshtml +++ b/HaWeb/Views/Shared/_UploadForm.cshtml @@ -1,4 +1,4 @@ -@model UploadViewModel; +@model XMLStateViewModel; diff --git a/HaWeb/XMLParser/IXMLService.cs b/HaWeb/XMLParser/IXMLInteractionService.cs similarity index 51% rename from HaWeb/XMLParser/IXMLService.cs rename to HaWeb/XMLParser/IXMLInteractionService.cs index fc8bfb1..782904e 100644 --- a/HaWeb/XMLParser/IXMLService.cs +++ b/HaWeb/XMLParser/IXMLInteractionService.cs @@ -5,21 +5,17 @@ using HaWeb.Models; using HaDocument.Interfaces; using HaDocument.Models; using HaXMLReader.Interfaces; +using Microsoft.Extensions.FileProviders; -public interface IXMLService { +public interface IXMLInteractionService { + public XElement? TryCreate(); + public bool GetValidState(); + public void Collect(List Files); + public Dictionary? GetLoaded(); public IXMLRoot? GetRoot(string name); public List? GetRootsList(); - public Dictionary? GetRootsDictionary(); - public List? ProbeFile(XDocument document, ModelStateDictionary ModelState); - public Dictionary? GetUsedDictionary(); - public XElement? MergeUsedDocuments(ModelStateDictionary ModelState); - public void Use(XMLRootDocument doc); - public void AutoUse(string prefix); - public void AutoUse(FileList filelist); - public Dictionary? GetInProduction(); - public void UnUse(string prefix); - public void SetInProduction(); - public void SetInProduction(XDocument document); + public void CreateSearchables(XDocument document); + public List? GetManagedFiles(); public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? 
SearchCollection(string collection, string searchword, IReaderService reader, ILibrary? lib); public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? GetPreviews(List<(string, List)> places, IReaderService reader, ILibrary lib); } \ No newline at end of file diff --git a/HaWeb/XMLParser/IXMLRoot.cs b/HaWeb/XMLParser/IXMLRoot.cs index 43b41d6..75b4863 100644 --- a/HaWeb/XMLParser/IXMLRoot.cs +++ b/HaWeb/XMLParser/IXMLRoot.cs @@ -58,8 +58,5 @@ public interface IXMLRoot { // }); // return ret; // } - - public abstract XElement CreateHamannDocument(XElement element); - public abstract void MergeIntoFile(XElement file, XMLRootDocument document); } \ No newline at end of file diff --git a/HaWeb/XMLParser/XMLService.cs b/HaWeb/XMLParser/XMLInteractionService.cs similarity index 58% rename from HaWeb/XMLParser/XMLService.cs rename to HaWeb/XMLParser/XMLInteractionService.cs index cada1d3..9c396ed 100644 --- a/HaWeb/XMLParser/XMLService.cs +++ b/HaWeb/XMLParser/XMLInteractionService.cs @@ -1,123 +1,169 @@ -namespace HaWeb.XMLParser; +using System.Collections.Concurrent; +using System.Diagnostics; +using System.Text; +using System.Xml; using System.Xml.Linq; using System.Xml.XPath; -using Microsoft.AspNetCore.Mvc.ModelBinding; -using HaWeb.Models; -using HaWeb.SearchHelpers; -using System.Collections.Concurrent; -using System.Threading.Tasks; -using System.Text; -using HaXMLReader.Interfaces; using HaDocument.Interfaces; using HaDocument.Models; +using HaWeb.FileHelpers; +using HaWeb.Models; +using HaWeb.SearchHelpers; +using HaWeb.XMLParser; using HaWeb.XMLTests; +using HaXMLReader.Interfaces; +using Microsoft.Extensions.FileProviders; -// XMLService provides a wrapper around the loaded and used XML data -public class XMLService : IXMLService { - private Dictionary? _Used; - private Dictionary? _Roots; - private Dictionary? 
_Collections; +// Conditions for Successful create +// All types there +// Merging Success +// Saving Success +// Loading Success - private Stack>? _InProduction; +// Startup (BEFORE IXMLFileProvider, After IHaDocumentWrapper) +public class XMLInteractionService : IXMLInteractionService { + private readonly IXMLTestService _testService; + private readonly long _fileSizeLimit; + private readonly string[] _allowedExtensions = { ".xml" }; + private readonly static XmlReaderSettings _xmlSettings = new XmlReaderSettings() { + CloseInput = true, + CheckCharacters = false, + ConformanceLevel = ConformanceLevel.Fragment, + IgnoreComments = true, + IgnoreProcessingInstructions = true, + IgnoreWhitespace = false + }; - private Dictionary? _collectedProduction; - private Dictionary? _collectedUsed; + private Dictionary? _RootDefs; + private Dictionary? _CollectionDefs; - public XMLService() { - // Getting all classes which implement IXMLRoot for possible document endpoints + private List? _ManagedFiles; + private Dictionary? _Loaded; + private Dictionary? 
_Collection; + + private bool _ValidState = false; + + public XMLInteractionService(IConfiguration config, IXMLTestService testService) { + _testService = testService; + _fileSizeLimit = config.GetValue("FileSizeLimit"); var roottypes = _GetAllTypesThatImplementInterface().ToList(); roottypes.ForEach( x => { - if (this._Roots == null) this._Roots = new Dictionary(); + if (this._RootDefs == null) this._RootDefs = new Dictionary(); var instance = (IXMLRoot)Activator.CreateInstance(x)!; - if (instance != null) this._Roots.Add(instance.Prefix, instance); + if (instance != null) this._RootDefs.Add(instance.Prefix, instance); }); var collectiontypes = _GetAllTypesThatImplementInterface().ToList(); collectiontypes.ForEach( x => { - if (this._Collections == null) this._Collections = new Dictionary(); + if (this._CollectionDefs == null) this._CollectionDefs = new Dictionary(); var instance = (IXMLCollection)Activator.CreateInstance(x)!; - if (instance != null && instance.IsGlobal()) this._Collections.Add(instance.Key, instance); + if (instance != null && instance.IsGlobal()) this._CollectionDefs.Add(instance.Key, instance); }); - if (_Roots == null || !_Roots.Any()) + if (_RootDefs == null || !_RootDefs.Any()) throw new Exception("No classes for upload endpoints were found!"); - if (_Collections == null || !_Collections.Any()) + if (_CollectionDefs == null || !_CollectionDefs.Any()) throw new Exception("No classes for object collection were found!"); } + // Getters and Setters + public Dictionary? GetLoaded() => this._Loaded; + + public List? GetManagedFiles() => this._ManagedFiles; + + public List? GetRootsList() => this._RootDefs == null ? null : this._RootDefs.Values.ToList(); + + public bool GetValidState() => this._ValidState; + public IXMLRoot? GetRoot(string name) { - if (_Roots == null) return null; - _Roots.TryGetValue(name, out var root); + if (_RootDefs == null) return null; + _RootDefs.TryGetValue(name, out var root); return root; } - public List? 
GetRootsList() => this._Roots == null ? null : this._Roots.Values.ToList(); - - public Dictionary? GetRootsDictionary() => this._Roots == null ? null : this._Roots; - - public Dictionary? GetInProduction() { - if (_InProduction == null) return null; - return this._InProduction.Peek(); - } - - public void SetInProduction() { - if (_Used == null) return; - var inProduction = new Dictionary(); - foreach (var category in _Used) { - if (category.Value == null || category.Value.GetFileList() == null || !category.Value.GetFileList()!.Any()) - return; - inProduction.Add(category.Key, category.Value); - } - - if(_InProduction == null) _InProduction = new Stack>(); - _InProduction.Push(inProduction); - } - - public void SetInProduction(XDocument document) { - if (document == null || _Roots == null) return; - int numProcs = Environment.ProcessorCount; - int concurrencyLevel = numProcs * 2; - int startingSize = 2909; - int startingSizeAllCollections = 23; - var ret = new ConcurrentDictionary(concurrencyLevel, startingSizeAllCollections); - - if (_Collections != null) - Parallel.ForEach(_Collections, (coll) => { - var elem = coll.Value.xPath.Aggregate(new List(), (x, y) => { x.AddRange(document.XPathSelectElements(y).ToList()); return x; } ); - if (elem != null && elem.Any()) { - var items = new ConcurrentDictionary(concurrencyLevel, startingSize); - foreach (var e in elem) { - var k = coll.Value.GenerateKey(e); - if (k != null) { - var searchtext = coll.Value.Searchable ? - StringHelpers.NormalizeWhiteSpace(e.ToString(), ' ', false) : - null; - var datafileds = coll.Value.GenerateDataFields != null ? 
- coll.Value.GenerateDataFields(e) : - null; - items[k] = new CollectedItem(k, e, coll.Value, datafileds, searchtext); - } - } - if (items.Any()) { - if (!ret.ContainsKey(coll.Key)) - ret[coll.Key] = new ItemsCollection(coll.Key, coll.Value); - foreach (var item in items) - ret[coll.Key].Items.Add(item.Key, item.Value); + // Functions + public void Collect(List files) { + if (files == null || !files.Any()) return; + _ValidState = true; + List res = new List(); + foreach (var f in files) { + var sb = new StringBuilder(); + var m = _CreateFileModel(f, null); + res.Add(m); + // 1. Open File for Reading + try { + using (Stream file = f.CreateReadStream()) { + // 2. Some security checks, if file empty, wrong start, wrong extension, too big + if (!XMLFileHelpers.ProcessFile(file, f.Name, sb, _allowedExtensions, _fileSizeLimit)) { + m.Log(sb.ToString()); + continue; } } - }); + } catch { + m.Log( "Datei konnte nicht geöffnet werden."); + continue; + } - if (ret.Any()) { - Parallel.ForEach(ret, (collection) => { - collection.Value.GenerateGroupings(); - }); + // 3. Check validity of XML + try { + using (var xmlreader = XmlReader.Create(f.CreateReadStream(), _xmlSettings)) { + var doc = XDocument.Load(xmlreader, LoadOptions.PreserveWhitespace | LoadOptions.SetLineInfo); + + // 4. Check if opus-Document + // TODO: Unter der HOOD werden in ProbeFiles noch eigene Files gebaut! + var docs = _ProbeFile(doc, m); + if (docs == null || !docs.Any()) continue; + + // Success! File can be recognized and parsed. 
+ m.Validate(); + foreach (var d in docs) { + if (_Loaded == null) _Loaded = new Dictionary(); + if (!_Loaded.ContainsKey(d.Prefix)) _Loaded.Add(d.Prefix, new FileList(d.XMLRoot)); + _Loaded[d.Prefix]!.Add(d); + } + } + } catch (Exception ex) { + m.Log($"Ungültiges XML: {ex.Message}"); + continue; + } } - _collectedProduction = ret.ToDictionary(x => x.Key, y => y.Value); + if(res.Any()) this._ManagedFiles = res; + + // Set validity + foreach (var f in _ManagedFiles) { + if (!f.IsValid) _ValidState = false; + break; + } + + // TODO: Speed up this: + var sw = new Stopwatch(); + sw.Start(); + _testService.Test(this); + sw.Stop(); + Console.WriteLine("Syntaxcheck " + sw.ElapsedMilliseconds.ToString() + " ms"); + } + + public XElement? TryCreate() { + if (_Loaded == null || !_Loaded.Any() || _RootDefs == null || !_RootDefs.Any() || !_ValidState) return null; + var opus = new XElement("opus"); + // TODO: Workaround for bug in HaDocument: roots have to be added in a specific order + var used = _Loaded.OrderByDescending(x => x.Key); + foreach (var category in used) { + if (category.Value == null || category.Value.GetFileList() == null || !category.Value.GetFileList()!.Any()) { + return null; + } + var documents = category.Value.GetFileList(); + foreach (var document in documents!) { + document.XMLRoot.MergeIntoFile(opus, document); + } + } + return opus; } public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? 
GetPreviews(List<(string, List)> places, IReaderService reader, ILibrary lib) { - var searchableObjects = _collectedProduction["letters"].Items; + if (!_Collection.ContainsKey("letters")) return null; + var searchableObjects = _Collection["letters"].Items; var res = new ConcurrentBag<(string Index, List<(string Page, string Line, string preview, string identifier)> Results)>(); Parallel.ForEach(places, (obj) => { @@ -148,8 +194,8 @@ public class XMLService : IXMLService { } public List<(string Index, List<(string Page, string Line, string Preview, string Identifier)> Results)>? SearchCollection(string collection, string searchword, IReaderService reader, ILibrary lib) { - if (!_collectedProduction.ContainsKey(collection)) return null; - var searchableObjects = _collectedProduction[collection].Items; + if (!_Collection.ContainsKey(collection)) return null; + var searchableObjects = _Collection[collection].Items; var res = new ConcurrentBag<(string Index, List<(string Page, string Line, string preview, string identifier)> Results)>(); var sw = StringHelpers.NormalizeWhiteSpace(searchword.Trim()); @@ -202,110 +248,88 @@ public class XMLService : IXMLService { return res.ToList(); } - public List? ProbeFile(XDocument document, ModelStateDictionary ModelState) { - if (document.Root!.Name != "opus") { - ModelState.AddModelError("Error", "A valid Hamann-Docuemnt must begin with "); - return null; - } + public void CreateSearchables(XDocument document) { + if (document == null || _RootDefs == null) return; + int numProcs = Environment.ProcessorCount; + int concurrencyLevel = numProcs * 2; + int startingSize = 2909; + int startingSizeAllCollections = 23; + var ret = new ConcurrentDictionary(concurrencyLevel, startingSizeAllCollections); - List? 
res = null; - if (document.Root != null && _Roots != null) { - foreach (var (_, root) in _Roots) { - var elements = root.IsTypeOf(document.Root); - if (elements != null && elements.Any()) - foreach (var elem in elements) { - if (res == null) res = new List(); - res.Add(_createXMLRootDocument(root, elem)); + if (_CollectionDefs != null) + Parallel.ForEach(_CollectionDefs, (coll) => { + var elem = coll.Value.xPath.Aggregate(new List(), (x, y) => { x.AddRange(document.XPathSelectElements(y).ToList()); return x; } ); + if (elem != null && elem.Any()) { + var items = new ConcurrentDictionary(concurrencyLevel, startingSize); + foreach (var e in elem) { + var k = coll.Value.GenerateKey(e); + if (k != null) { + var searchtext = coll.Value.Searchable ? + StringHelpers.NormalizeWhiteSpace(e.ToString(), ' ', false) : + null; + var datafileds = coll.Value.GenerateDataFields != null ? + coll.Value.GenerateDataFields(e) : + null; + items[k] = new CollectedItem(k, e, coll.Value, datafileds, searchtext); + } + } + if (items.Any()) { + if (!ret.ContainsKey(coll.Key)) + ret[coll.Key] = new ItemsCollection(coll.Key, coll.Value); + foreach (var item in items) + ret[coll.Key].Items.Add(item.Key, item.Value); } - } - } - if (res == null) ModelState.AddModelError("Error", "Kein zum Hamann-Briefe-Projekt passendes XML gefunden."); - return res; - } - - public Dictionary? GetUsedDictionary() - => this._Used; - - - // Adds a document and sets it to used - public void Use(XMLRootDocument doc) { - if (_Used == null) _Used = new Dictionary(); - if (!_Used.ContainsKey(doc.Prefix)) _Used.Add(doc.Prefix, new FileList(doc.XMLRoot)); - _Used[doc.Prefix]!.Add(doc); - _ = doc.GetElement(); - } - - public void UnUse(string prefix) { - if (_Used != null && _Used.ContainsKey(prefix)) { - // Unload the Elements so unused files don't use up the memory. 
- if (_Used[prefix]!.GetFileList() != null) { - foreach (var e in _Used[prefix]!.GetFileList()) { - e.UnUse(); } - } - _Used.Remove(prefix); + }); + + if (ret.Any()) { + Parallel.ForEach(ret, (collection) => { + collection.Value.GenerateGroupings(); + }); } - - return; + _Collection = ret.ToDictionary(x => x.Key, y => y.Value); } - // Performs detection of using on the specified document type - public void AutoUse(string prefix) { - if (_Used == null || !_Used.ContainsKey(prefix)) return; - AutoUse(_Used[prefix]!); - } - - // Performs detection of using given a list of files - public void AutoUse(FileList filelist) { - FileList? res = null; - var list = filelist.GetFileList(); - var prefix = filelist.XMLRoot.Prefix; - - if (list == null) return; - if (_Used != null && _Used.ContainsKey(prefix)) _Used.Remove(prefix); - - // TODO: Item1 - var lookup = list.ToLookup(x => x.IdentificationString.Item2); - foreach (var idstring in lookup) { - var ordered = idstring.OrderBy(x => x.Date); - if (res == null) res = new FileList(filelist.XMLRoot); - Use(ordered.Last()); - } - } - - public XElement? MergeUsedDocuments(ModelStateDictionary ModelState) { - if (_Used == null || _Roots == null) { - ModelState.AddModelError("Error", "Keine Dokumente ausgewählt"); - return null; - } - - var opus = new XElement("opus"); - // TODO: Workaround for bug in HaDocument: roots have to be added in a specific order - var used = _Used.OrderByDescending(x => x.Key); - foreach (var category in used) { - if (category.Value == null || category.Value.GetFileList() == null || !category.Value.GetFileList()!.Any()) { - ModelState.AddModelError("Error", _Roots![category.Key].Type + " nicht vorhanden."); - return null; - } - var documents = category.Value.GetFileList(); - foreach (var document in documents!) 
{ - document.XMLRoot.MergeIntoFile(opus, document); - } - } - - return opus; - } - - private XMLRootDocument _createXMLRootDocument(IXMLRoot Root, XElement element) { - var doc = new XMLRootDocument(Root, Root.Prefix, Root.GenerateIdentificationString(element), element); - doc.Fields = Root.GenerateFields(doc); - return doc; - } - - private IEnumerable _GetAllTypesThatImplementInterface() - { + private IEnumerable _GetAllTypesThatImplementInterface() { return System.Reflection.Assembly.GetExecutingAssembly() .GetTypes() .Where(type => typeof(T).IsAssignableFrom(type) && !type.IsInterface); } + + private List? _ProbeFile(XDocument document, FileModel file) { + if (document.Root!.Name != "opus") { + file.Log("Ein gültiges Dokument muss mit beginnen."); + return null; + } + + List? res = null; + if (document.Root != null && _RootDefs != null) { + foreach (var (_, root) in _RootDefs) { + var elements = root.IsTypeOf(document.Root); + if (elements != null && elements.Any()) + foreach (var elem in elements) { + if (res == null) res = new List(); + res.Add(_createXMLRootDocument(root, elem, file)); + } + } + } + if (res == null) file.Log("Dokumenten-Typ nicht erkannt."); + return res; + } + + private XMLRootDocument _createXMLRootDocument(IXMLRoot Root, XElement element, FileModel file) { + var doc = new XMLRootDocument(Root, Root.Prefix, Root.GenerateIdentificationString(element), element, file); + doc.Fields = Root.GenerateFields(doc); + return doc; + } + + private FileModel _CreateFileModel(IFileInfo file, string? 
message) { + var m = new FileModel(file.Name, file); + if (!String.IsNullOrWhiteSpace(message)) { + m.Log(message); + } + return m; + } + + } \ No newline at end of file diff --git a/HaWeb/XMLTests/BackgroundServices.cs b/HaWeb/XMLTests/BackgroundServices.cs new file mode 100644 index 0000000..7aaa01d --- /dev/null +++ b/HaWeb/XMLTests/BackgroundServices.cs @@ -0,0 +1,138 @@ +namespace HaWeb.BackgroundTask; + +using System.Threading.Channels; + +public interface IBackgroundTaskQueue { + ValueTask QueueBackgroundWorkItemAsync(Func workItem); + ValueTask> DequeueAsync(CancellationToken cancellationToken); +} + +public interface IMonitorLoop { + public void StartMonitorLoop(); +} + +public class BackgroundTaskQueue : IBackgroundTaskQueue { + private readonly Channel> _queue; + + public BackgroundTaskQueue(int capacity) { + var options = new BoundedChannelOptions(capacity) { + FullMode = BoundedChannelFullMode.Wait + }; + _queue = Channel.CreateBounded>(options); + } + + public async ValueTask QueueBackgroundWorkItemAsync(Func workItem) { + if (workItem == null) { + throw new ArgumentNullException(nameof(workItem)); + } + await _queue.Writer.WriteAsync(workItem); + } + + public async ValueTask> DequeueAsync( + CancellationToken cancellationToken + ) { + var workItem = await _queue.Reader.ReadAsync(cancellationToken); + return workItem; + } +} + +public class QueuedHostedService : BackgroundService { + private readonly ILogger _logger; + public IBackgroundTaskQueue TaskQueue { get; } + + public QueuedHostedService(IBackgroundTaskQueue taskQueue, ILogger logger) { + TaskQueue = taskQueue; + _logger = logger; + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) { + await BackgroundProcessing(stoppingToken); + } + + private async Task BackgroundProcessing(CancellationToken stoppingToken) { + while (!stoppingToken.IsCancellationRequested) { + var workItem = await TaskQueue.DequeueAsync(stoppingToken); + + try { + await workItem(stoppingToken); 
+ } + catch (Exception ex) { + _logger.LogError(ex, "Error occurred executing {WorkItem}.", nameof(workItem)); + } + } + } + + public override async Task StopAsync(CancellationToken stoppingToken) { + _logger.LogInformation("Queued Hosted Service is stopping."); + + await base.StopAsync(stoppingToken); + } +} + +public class MonitorLoop : IMonitorLoop { + private readonly IBackgroundTaskQueue _taskQueue; + private readonly ILogger _logger; + private readonly CancellationToken _cancellationToken; + + public MonitorLoop( + IBackgroundTaskQueue taskQueue, + ILogger logger, + IHostApplicationLifetime applicationLifetime + ) { + _taskQueue = taskQueue; + _logger = logger; + _cancellationToken = applicationLifetime.ApplicationStopping; + } + + public void StartMonitorLoop() { + _logger.LogInformation("MonitorAsync Loop is starting."); + + // Run a console user input loop in a background thread + Task.Run(async () => await MonitorAsync()); + } + + private async ValueTask MonitorAsync() { + while (!_cancellationToken.IsCancellationRequested) { + var keyStroke = Console.ReadKey(); + + if (keyStroke.Key == ConsoleKey.W) { + // Enqueue a background work item + await _taskQueue.QueueBackgroundWorkItemAsync(BuildWorkItem); + } + } + } + + private async ValueTask BuildWorkItem(CancellationToken token) { + // Simulate three 5-second tasks to complete + // for each enqueued work item + + int delayLoop = 0; + var guid = Guid.NewGuid().ToString(); + + _logger.LogInformation("Queued Background Task {Guid} is starting.", guid); + + while (!token.IsCancellationRequested && delayLoop < 3) { + try { + await Task.Delay(TimeSpan.FromSeconds(5), token); + } + catch (OperationCanceledException) { + // Prevent throwing if the Delay is cancelled + } + + delayLoop++; + + _logger.LogInformation( + "Queued Background Task {Guid} is running. 
" + "{DelayLoop}/3", + guid, + delayLoop + ); + } + + if (delayLoop == 3) { + _logger.LogInformation("Queued Background Task {Guid} is complete.", guid); + } + else { + _logger.LogInformation("Queued Background Task {Guid} was cancelled.", guid); + } + } +} diff --git a/HaWeb/XMLTests/HamannXPath.cs b/HaWeb/XMLTests/HamannXPath.cs new file mode 100644 index 0000000..e5c0a5c --- /dev/null +++ b/HaWeb/XMLTests/HamannXPath.cs @@ -0,0 +1,6 @@ +namespace HaWeb.XMLTests; + +public class HamannXPath { + public string[]? Documents { get; set; } + public string? XPath { get; set; } +} \ No newline at end of file diff --git a/HaWeb/XMLTests/ICollectionRule.cs b/HaWeb/XMLTests/ICollectionRule.cs index 08d9389..11c064a 100644 --- a/HaWeb/XMLTests/ICollectionRule.cs +++ b/HaWeb/XMLTests/ICollectionRule.cs @@ -4,8 +4,8 @@ using System.Xml.Linq; public interface ICollectionRule { public string Name { get; } - public string[] Bases { get; } - public string[] Backlinks { get; } + public HamannXPath[] Bases { get; } + public HamannXPath[] Backlinks { get; } public IEnumerable<(string, XElement, XMLRootDocument)> GenerateIdentificationStrings(IEnumerable<(XElement, XMLRootDocument)> List); public IEnumerable<(string, XElement, XMLRootDocument, bool)> GenerateBacklinkString(IEnumerable<(XElement, XMLRootDocument)> List); } \ No newline at end of file diff --git a/HaWeb/XMLTests/IXMLTestService.cs b/HaWeb/XMLTests/IXMLTestService.cs index 0f493e1..33d90d1 100644 --- a/HaWeb/XMLTests/IXMLTestService.cs +++ b/HaWeb/XMLTests/IXMLTestService.cs @@ -1,3 +1,5 @@ +using HaWeb.XMLParser; + namespace HaWeb.XMLTests; public interface IXMLTestService { @@ -5,5 +7,5 @@ public interface IXMLTestService { public Dictionary? Ruleset { get; } public Dictionary? 
CollectionRuleset { get; } - public void Test(); + public void Test(IXMLInteractionService _XMLService); } \ No newline at end of file diff --git a/HaWeb/XMLTests/InodeRule.cs b/HaWeb/XMLTests/InodeRule.cs index bac67ae..c3f30ec 100644 --- a/HaWeb/XMLTests/InodeRule.cs +++ b/HaWeb/XMLTests/InodeRule.cs @@ -2,8 +2,8 @@ namespace HaWeb.XMLTests; public interface INodeRule { public string Name { get; } - public string XPath { get; } + public HamannXPath XPath { get; } public string? uniquenessAttribute { get; } - public List<(string LinkAttribute, string RemoteElement, string RemoteAttribute)>? References { get; } + public List<(string LinkAttribute, HamannXPath RemoteElement, string RemoteAttribute)>? References { get; } public string[]? Attributes { get; } } \ No newline at end of file diff --git a/HaWeb/XMLTests/XMLTestService.cs b/HaWeb/XMLTests/XMLTestService.cs index ffad282..d3c28e3 100644 --- a/HaWeb/XMLTests/XMLTestService.cs +++ b/HaWeb/XMLTests/XMLTestService.cs @@ -2,12 +2,9 @@ namespace HaWeb.XMLTests; using HaWeb.XMLParser; public class XMLTestService : IXMLTestService { - private IXMLService _XMLService; public Dictionary? Ruleset { get; private set; } public Dictionary? 
CollectionRuleset { get; private set; } - public XMLTestService(IXMLService xmlService) { - _XMLService = xmlService; - + public XMLTestService() { var roottypes = _GetAllTypesThatImplementInterface().ToList(); roottypes.ForEach( x => { if (this.Ruleset == null) this.Ruleset = new Dictionary(); @@ -23,17 +20,10 @@ public class XMLTestService : IXMLTestService { }); } - public void Test() { - var docs = _XMLService.GetUsedDictionary(); + public void Test(IXMLInteractionService _XMLService) { + var docs = _XMLService.GetLoaded(); if (docs == null) return; - foreach (var d in docs.Values) { - var fl = d.GetFileList(); - if (fl == null) continue; - foreach (var v in fl) { - v.ResetLog(); - } - } - var tester = new XMLTester(this, _XMLService.GetUsedDictionary()); + var tester = new XMLTester(this, docs); tester.Test(); } diff --git a/HaWeb/XMLTests/XMLTester.cs b/HaWeb/XMLTests/XMLTester.cs index 73bd8ad..49f85c2 100644 --- a/HaWeb/XMLTests/XMLTester.cs +++ b/HaWeb/XMLTests/XMLTester.cs @@ -29,7 +29,7 @@ public class XMLTester { public void Test() { if (_Ruleset == null) return; - _IDs = new System.Collections.Generic.Dictionary>(); + _IDs = new Dictionary>(); foreach (var rule in _Ruleset) { buildIDs(rule.Value); checkRequiredAttributes(rule.Value); @@ -42,6 +42,7 @@ public class XMLTester { checkReferences(collectionrule.Value); } } + private void checkReferences(INodeRule rule) { if (rule.References == null || !rule.References.Any()) return; var elements = GetEvaluateXPath(rule.XPath); @@ -49,11 +50,11 @@ public class XMLTester { foreach (var e in elements) { foreach (var r in rule.References) { var hasattr = checkAttribute(e.Item1, r.LinkAttribute, e.Item2, false); - var keyname = r.RemoteElement + "-" + r.RemoteAttribute; + var keyname = r.RemoteElement.XPath + "-" + r.RemoteAttribute; if (_IDs != null && _IDs.ContainsKey(keyname) && hasattr) { var val = e.Item1.Attribute(r.LinkAttribute)!.Value; if (!_IDs[keyname].Contains(val)) { - 
e.Item2.Log(generateLogMessage(e.Item1) + "Verlinktes Element " + val + " nicht gefunden."); + e.Item2.File.Log(generateLogMessage(e.Item1) + "Verlinktes Element " + val + " nicht gefunden."); } } } @@ -68,7 +69,7 @@ public class XMLTester { if (elemens != null && elemens.Any()) { foreach(var r in rule.GenerateBacklinkString(elemens)) { if (!r.Item4 && !_CollectionIDs[rule.Name].Contains(r.Item1)) { - r.Item3.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden."); + r.Item3.File.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden."); } if (r.Item4) { var coll = _CollectionIDs[rule.Name]; @@ -76,7 +77,7 @@ public class XMLTester { var searchterm = items[0]; var found = coll.Where(x => x.StartsWith(searchterm)); if (items[0] == "NA" || found == null || !found.Any()) { - r.Item3.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden."); + r.Item3.File.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden."); } else { for (var i = 1; i < items.Length; i++) { if (items[i] == "NA") break; @@ -84,7 +85,7 @@ public class XMLTester { searchterm = searchterm + "-" + items[i]; found = found.Where(x => x.StartsWith(searchterm)); if (found == null || !found.Any()) - r.Item3.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden."); + r.Item3.File.Log(generateLogMessage(r.Item2) + "Verlinktes Element " + r.Item1 + " nicht gefunden."); } } } @@ -125,7 +126,7 @@ public class XMLTester { if (elemens != null && elemens.Any()) { foreach (var r in rule.GenerateIdentificationStrings(elemens)) { if (!hs.Add(r.Item1)) { - r.Item3.Log(generateLogMessage(r.Item2) + "Brief-Seite-Zeile " + r.Item1 + " mehrdeutig."); + r.Item3.File.Log(generateLogMessage(r.Item2) + "Brief-Seite-Zeile " + r.Item1 + " mehrdeutig."); } } } @@ -134,24 +135,24 @@ public class XMLTester { } } - private void checkUniqueness(string xpathelement, string attribute) 
{ - if (_Documents == null || _IDs == null || _IDs.ContainsKey(xpathelement + "-" + attribute)) return; + private void checkUniqueness(HamannXPath xpathelement, string attribute) { + if (_Documents == null || _IDs == null || _IDs.ContainsKey(xpathelement.XPath + "-" + attribute)) return; var hs = new HashSet(); var elements = GetEvaluateXPath(xpathelement); if (elements != null) foreach (var e in elements) { if (checkAttribute(e.Item1, attribute, e.Item2)) { if (!hs.Add(e.Item1.Attribute(attribute)!.Value)) { - e.Item2.Log(generateLogMessage(e.Item1) + "Attributwert " + e.Item1.Attribute(attribute)!.Value + " doppelt."); + e.Item2.File.Log(generateLogMessage(e.Item1) + "Attributwert " + e.Item1.Attribute(attribute)!.Value + " doppelt."); } } } - _IDs.TryAdd(xpathelement + "-" + attribute, hs); + _IDs.TryAdd(xpathelement.XPath + "-" + attribute, hs); } private bool checkAttribute(XElement element, string attributename, XMLRootDocument doc, bool log = true) { if (!element.HasAttributes || element.Attribute(attributename) == null) { - if (log) doc.Log(generateLogMessage(element) + "Attribut " + attributename + " fehlt."); + if (log) doc.File.Log(generateLogMessage(element) + "Attribut " + attributename + " fehlt."); return false; } return true; @@ -165,19 +166,21 @@ public class XMLTester { ": "; } - private List<(XElement, XMLRootDocument)>? GetEvaluateXPath(string xpath) { - if (_XPathEvaluated.ContainsKey(xpath)) return _XPathEvaluated[xpath]; - if (!_XPathEvaluated.ContainsKey(xpath)) _XPathEvaluated.Add(xpath, null); - if (_Documents == null) return null; + // Cache for XPATH evaluation + private List<(XElement, XMLRootDocument)>? 
GetEvaluateXPath(HamannXPath xpath) { + if (_Documents == null || xpath == null) return null; + if (_XPathEvaluated.ContainsKey(xpath.XPath)) return _XPathEvaluated[xpath.XPath]; + if (!_XPathEvaluated.ContainsKey(xpath.XPath)) _XPathEvaluated.Add(xpath.XPath, null); foreach (var d in _Documents) { - var elements = d.GetElement().XPathSelectElements(xpath).ToList(); + if (xpath.Documents != null && !xpath.Documents.Contains(d.Prefix)) continue; + var elements = d.Element.XPathSelectElements("." + xpath.XPath).ToList(); if (elements != null && elements.Any()) { - if (_XPathEvaluated[xpath] == null) _XPathEvaluated[xpath] = new List<(XElement, XMLRootDocument)>(); + if (_XPathEvaluated[xpath.XPath] == null) _XPathEvaluated[xpath.XPath] = new List<(XElement, XMLRootDocument)>(); foreach (var res in elements) { - _XPathEvaluated[xpath]!.Add((res, d)); + _XPathEvaluated[xpath.XPath]!.Add((res, d)); } } } - return _XPathEvaluated[xpath]; + return _XPathEvaluated[xpath.XPath]; } } \ No newline at end of file diff --git a/HaWeb/appsettings.json b/HaWeb/appsettings.json index c8f9694..f3e7ad2 100644 --- a/HaWeb/appsettings.json +++ b/HaWeb/appsettings.json @@ -7,14 +7,16 @@ }, "FeatureManagement": { "AdminService": true, - "UploadService": true, - "LocalPublishService": true, - "RemotePublishService": false, - "RemotePublishSourceService": false + "LocalPublishService": true }, "AllowedHosts": "*", - "StoredFilePathLinux": "/home/simon/Downloads/test/", - "StoredFilePathWindows": "C:/Users/simon/Downloads/test/", + "HamannFileStoreLinux": "/home/simon/Downloads/test/", + "HamannFileStoreWindows": "C:/Users/simon/Downloads/test/", + "BareRepositoryPathLinux": "/home/simon/Downloads/test/", + "BareRepositoryPathWindows": "C:/Users/simon/source/hamann-xml/.git/", + "WorkingTreePathLinux": "/home/simon/Downloads/test/", + "WorkingTreePathWindows": "C:/Users/simon/source/hamann-xml/", + "RepositoryBranch": "main", "StoredPDFPathWindows": "", "StoredPDFPathLinux": "", 
"FileSizeLimit": 52428800, diff --git a/HaWeb/omnisharp.json b/HaWeb/omnisharp.json deleted file mode 100644 index 6048437..0000000 --- a/HaWeb/omnisharp.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "FormattingOptions": { - "NewLinesForBracesInLambdaExpressionBody": false, - "NewLinesForBracesInAnonymousMethods": false, - "NewLinesForBracesInAnonymousTypes": false, - "NewLinesForBracesInControlBlocks": false, - "NewLinesForBracesInTypes": false, - "NewLinesForBracesInMethods": false, - "NewLinesForBracesInProperties": false, - "NewLinesForBracesInObjectCollectionArrayInitializers": false, - "NewLinesForBracesInAccessors": false, - "NewLineForElse": false, - "NewLineForCatch": false, - "NewLineForFinally": false - } -} \ No newline at end of file diff --git a/HaWeb/wwwroot/js/filelistform.js b/HaWeb/wwwroot/js/filelistform.js index 6db8327..7e87943 100644 --- a/HaWeb/wwwroot/js/filelistform.js +++ b/HaWeb/wwwroot/js/filelistform.js @@ -1,3 +1,9 @@ +function getCookie(name) { + var value = "; " + document.cookie; + var parts = value.split("; " + name + "="); + if (parts.length == 2) return parts.pop().split(";").shift(); +} + const USESubmit = async function (oFormElement, file = null) { let fd = new FormData(oFormElement); document.getElementById("ha-filelistbutton").style.pointerEvents = "none"; diff --git a/HaWeb/wwwroot/js/upload.js b/HaWeb/wwwroot/js/upload.js deleted file mode 100644 index ddeb8e3..0000000 --- a/HaWeb/wwwroot/js/upload.js +++ /dev/null @@ -1,22 +0,0 @@ -const hideshowfiles = function() { - let elem = document.getElementById("ha-availablefileslist"); - if (elem.classList.contains('hidden')) { - - elem.classList.remove('hidden'); - elem.classList.add('block'); - } - else { - elem.classList.add('hidden'); - elem.classList.remove('block'); - } - } - - function getCookie(name) { - var value = "; " + document.cookie; - var parts = value.split("; " + name + "="); - if (parts.length == 2) return parts.pop().split(";").shift(); -} - -var filesbutton = 
document.getElementById("ha-availablefiles"); -if (filesbutton !== null) - filesbutton.addEventListener("click", () => hideshowfiles());
@Model.AvailableRoots.Where(x => x.Prefix == category).First().Type: