Logging Enhancements (#1521)

* Recreated Kavita logging with Serilog instead of the default provider. The logging configuration needs to move out of appsettings.json now, so the auto updater can safely patch it.

* Refactored logging to be configured completely in code rather than via appsettings.json. This is a required step for auto updating.
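
A minimal sketch of what a code-only Serilog setup can look like. This is illustrative, not the actual Kavita implementation: the sink choices, file path, and helper name are assumptions.

```csharp
using Serilog;
using Serilog.Events;

public static class LogConfiguration
{
    // Hypothetical helper: the logger is built entirely in code, so nothing
    // logging-related has to live in appsettings.json (which an auto updater
    // may need to replace or patch).
    public static Serilog.Core.Logger CreateLogger()
    {
        return new LoggerConfiguration()
            .MinimumLevel.Information()
            // Keep framework noise down: only warnings and above from ASP.NET Core.
            .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning)
            .WriteTo.Console()
            // Rolling log file; the path here is just an example.
            .WriteTo.File("config/logs/kavita.log", rollingInterval: RollingInterval.Day)
            .CreateLogger();
    }
}
```

With the Serilog.AspNetCore package, the host can then be pointed at this logger (for example via `builder.Host.UseSerilog(LogConfiguration.CreateLogger())`), so appsettings.json no longer needs a logging section at all.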

* Added the ability to stream logs directly to the UI, but only for users on the log route. Stopping the implementation here, as the Alerts page will handle the rest.
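
One way to stream log events to the UI is a custom Serilog sink that pushes to a SignalR group which only clients on the log route join. The hub, group, and event names below are hypothetical; this is a sketch of the pattern, not Kavita's actual wiring.

```csharp
using Microsoft.AspNetCore.SignalR;
using Serilog.Core;
using Serilog.Events;

// Hypothetical hub; clients viewing the log route would join the "logs" group.
public class LogHub : Hub { }

public class SignalRLogSink : ILogEventSink
{
    private readonly IHubContext<LogHub> _hubContext;

    public SignalRLogSink(IHubContext<LogHub> hubContext)
    {
        _hubContext = hubContext;
    }

    public void Emit(LogEvent logEvent)
    {
        // Fire-and-forget push; only members of the "logs" group receive events.
        _ = _hubContext.Clients.Group("logs").SendAsync("LogEvent", new
        {
            Timestamp = logEvent.Timestamp,
            Level = logEvent.Level.ToString(),
            Message = logEvent.RenderMessage()
        });
    }
}
```

Such a sink would be attached with `.WriteTo.Sink(new SignalRLogSink(hubContext))` on the `LoggerConfiguration`; as noted above, the remaining UI work is deferred to the Alerts page.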

* Fixed up the backup service so it no longer relies on configuration from appsettings.json
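
The direction here is presumably that the backup service discovers what it needs (for example, log files) on disk instead of parsing paths out of appsettings.json. A small sketch under that assumption, with a hypothetical directory and helper name:

```csharp
using System.Collections.Generic;
using System.IO;
using System.Linq;

public static class BackupHelpers
{
    // Instead of reading a configured log path from appsettings.json,
    // enumerate whatever log files exist in the known log directory.
    public static IEnumerable<string> GetLogFilesForBackup(string logDirectory = "config/logs")
    {
        if (!Directory.Exists(logDirectory)) return Enumerable.Empty<string>();
        return Directory.GetFiles(logDirectory, "*.log", SearchOption.TopDirectoryOnly);
    }
}
```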

* Tweaked the available logging levels
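
With configuration in code, the exposed levels can be mapped onto Serilog at runtime through a `LoggingLevelSwitch`. The level names below are an assumption about what the setting exposes, not the actual list:

```csharp
using Serilog.Core;
using Serilog.Events;

public static class LogLevelMapper
{
    // Shared switch so the minimum level can be changed while the server is running.
    public static readonly LoggingLevelSwitch LevelSwitch = new(LogEventLevel.Information);

    // Maps a user-facing level name (assumed values) onto Serilog's levels.
    public static void SetLevel(string level)
    {
        LevelSwitch.MinimumLevel = level switch
        {
            "Trace" => LogEventLevel.Verbose,
            "Debug" => LogEventLevel.Debug,
            "Information" => LogEventLevel.Information,
            "Warning" => LogEventLevel.Warning,
            "Critical" => LogEventLevel.Fatal,
            _ => LogEventLevel.Information
        };
    }
}
```

The switch would be wired in with `.MinimumLevel.ControlledBy(LogLevelMapper.LevelSwitch)` on the `LoggerConfiguration`.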

* Moved everything over to File-scoped namespaces

* Code cleanup: removed an old migration and changed debug logging so it no longer prints sensitive DB data
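
For the debug-logging change, a common pattern (a guess at the approach rather than a quote of the actual code) is to keep EF Core's sensitive-data logging off and cap the noisy database-command category so query parameters are not written out at Debug:

```csharp
using Microsoft.EntityFrameworkCore;
using Serilog;
using Serilog.Events;

public class ExampleDataContext : DbContext
{
    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
    {
        // Never echo parameter values (which can contain user data) into logs,
        // even when the application-wide level is Debug.
        optionsBuilder.EnableSensitiveDataLogging(false);
    }
}

public static class DbLoggingExtensions
{
    // Raise the threshold for EF Core's SQL command logging so debug runs
    // don't dump full query text and parameters.
    public static LoggerConfiguration QuietDatabaseCommands(this LoggerConfiguration config)
        => config.MinimumLevel.Override(
            "Microsoft.EntityFrameworkCore.Database.Command",
            LogEventLevel.Warning);
}
```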

* Removed dead code
Joseph Milazzo 2022-09-12 19:25:48 -05:00 committed by GitHub
parent 9f715cc35f
commit d1a14f7e68
212 changed files with 16599 additions and 16834 deletions

@@ -1,8 +1,7 @@
-namespace API.Benchmark
-{
+namespace API.Benchmark;
+
 public class ArchiveSerivceBenchmark
 {
     // Benchmark to test default GetNumberOfPages from archive
     // vs a new method where I try to open the archive and return said stream
 }
-}

@@ -5,75 +5,74 @@ using System.Text.RegularExpressions;
 using BenchmarkDotNet.Attributes;
 using BenchmarkDotNet.Order;

-namespace API.Benchmark
-{
+namespace API.Benchmark;
+
 [MemoryDiagnoser]
 [Orderer(SummaryOrderPolicy.FastestToSlowest)]
 [RankColumn]
 public class ParserBenchmarks
 {
     private readonly IList<string> _names;

     private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9]",
         RegexOptions.IgnoreCase | RegexOptions.Compiled,
         TimeSpan.FromMilliseconds(300));

     private static readonly Regex IsEpub = new Regex(@"\.epub",
         RegexOptions.IgnoreCase | RegexOptions.Compiled,
         TimeSpan.FromMilliseconds(300));

     public ParserBenchmarks()
     {
         // Read all series from SeriesNamesForNormalization.txt
         _names = File.ReadAllLines("Data/SeriesNamesForNormalization.txt");
         Console.WriteLine($"Performing benchmark on {_names.Count} series");
     }

     private static string Normalize(string name)
     {
         // ReSharper disable once UnusedVariable
         var ret = NormalizeRegex.Replace(name, string.Empty).ToLower();
         var normalized = NormalizeRegex.Replace(name, string.Empty).ToLower();
         return string.IsNullOrEmpty(normalized) ? name : normalized;
     }

     [Benchmark]
     public void TestNormalizeName()
     {
         foreach (var name in _names)
         {
             Normalize(name);
         }
     }

     [Benchmark]
     public void TestIsEpub()
     {
         foreach (var name in _names)
         {
             if ((name).ToLower() == ".epub")
             {
                 /* No Operation */
             }
         }
     }

     [Benchmark]
     public void TestIsEpub_New()
     {
         foreach (var name in _names)
         {
             if (Path.GetExtension(name).Equals(".epub", StringComparison.InvariantCultureIgnoreCase))
             {
                 /* No Operation */
             }
         }
     }
 }
-}

@@ -1,22 +1,21 @@
 using BenchmarkDotNet.Running;

-namespace API.Benchmark
-{
+namespace API.Benchmark;
+
 /// <summary>
 /// To build this, cd into API.Benchmark directory and run
 /// dotnet build -c Release
 /// then copy the outputted dll
 /// dotnet copied_string\API.Benchmark.dll
 /// </summary>
 public static class Program
 {
     private static void Main(string[] args)
     {
         //BenchmarkRunner.Run<ParseScannedFilesBenchmarks>();
         //BenchmarkRunner.Run<TestBenchmark>();
         //BenchmarkRunner.Run<ParserBenchmarks>();
         BenchmarkRunner.Run<EpubBenchmark>();
     }
 }
-}

@@ -6,61 +6,60 @@ using API.Extensions;
 using BenchmarkDotNet.Attributes;
 using BenchmarkDotNet.Order;

-namespace API.Benchmark
-{
+namespace API.Benchmark;
+
 /// <summary>
 /// This is used as a scratchpad for testing
 /// </summary>
 [MemoryDiagnoser]
 [Orderer(SummaryOrderPolicy.FastestToSlowest)]
 [RankColumn]
 public class TestBenchmark
 {
     private static IEnumerable<VolumeDto> GenerateVolumes(int max)
     {
         var random = new Random();
         var maxIterations = random.Next(max) + 1;
         var list = new List<VolumeDto>();
         for (var i = 0; i < maxIterations; i++)
         {
             list.Add(new VolumeDto()
             {
                 Number = random.Next(10) > 5 ? 1 : 0,
                 Chapters = GenerateChapters()
             });
         }

         return list;
     }

     private static List<ChapterDto> GenerateChapters()
     {
         var list = new List<ChapterDto>();
         for (var i = 1; i < 40; i++)
         {
             list.Add(new ChapterDto()
             {
                 Range = i + string.Empty
             });
         }

         return list;
     }

     private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
     {
         foreach (var v in volumes.Where(vDto => vDto.Number == 0))
         {
             v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList();
         }
     }

     [Benchmark]
     public void TestSortSpecialChapters()
     {
         var volumes = GenerateVolumes(10);
         SortSpecialChapters(volumes);
     }
 }
-}