Moved some files around; fixed log file fetching to use a zip archive and work more reliably; refactored some code in the directory service.
This commit is contained in:
parent bbb4240e20
commit ecfb40cb2d
15 changed files with 128 additions and 47 deletions
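The commit message says log file fetching now goes through a zip archive, but the diff for that change is not shown on this page. The snippet below is only a minimal sketch of the general idea using System.IO.Compression; the class name, method name, and the *.log filter are assumptions, not code from this commit.

```csharp
using System.IO;
using System.IO.Compression;

public static class LogZipExample
{
    // Hypothetical helper: bundles every *.log file under logDirectory into a
    // single in-memory zip archive and returns its bytes, so an API endpoint
    // can serve one download instead of streaming individual log files.
    public static byte[] GetLogFilesAsZip(string logDirectory)
    {
        using var memoryStream = new MemoryStream();
        using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, leaveOpen: true))
        {
            foreach (var logFile in Directory.EnumerateFiles(logDirectory, "*.log"))
            {
                // Each log file becomes one entry, named after the file itself.
                archive.CreateEntryFromFile(logFile, Path.GetFileName(logFile));
            }
        }
        return memoryStream.ToArray();
    }
}
```

Packing the files into one archive keeps the response to a single body, which is one way to make fetching several rotating log files more reliable than returning them one by one.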
@@ -92,7 +92,8 @@ namespace API.Services
         public void ClearDirectory(string directoryPath)
         {
-            DirectoryInfo di = new DirectoryInfo(directoryPath);
+            var di = new DirectoryInfo(directoryPath);
+            if (!di.Exists) return;
 
             foreach (var file in di.EnumerateFiles())
             {
@@ -156,7 +157,7 @@ namespace API.Services
             return new ImageDto
             {
-                Content = await File.ReadAllBytesAsync(imagePath),
+                Content = await ReadFileAsync(imagePath),
                 Filename = Path.GetFileNameWithoutExtension(imagePath),
                 FullPath = Path.GetFullPath(imagePath),
                 Width = image.Width,
@@ -165,6 +166,12 @@ namespace API.Services
             };
         }
 
+        public async Task<byte[]> ReadFileAsync(string path)
+        {
+            if (!File.Exists(path)) return Array.Empty<byte>();
+            return await File.ReadAllBytesAsync(path);
+        }
+
         /// <summary>
         /// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed
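The hunk ends inside the XML doc comment for a parallel recursive scan; the method body itself is not part of this excerpt. The sketch below only illustrates the technique the comment describes (walk a directory tree and apply an action to each file across all available cores); the names and sample path are hypothetical, not taken from this repository.

```csharp
using System;
using System.IO;
using System.Threading.Tasks;

public static class ParallelScanExample
{
    // Hypothetical sketch: enumerate every file under root recursively and
    // apply the given action, letting the runtime spread the work over the
    // machine's cores via Parallel.ForEach.
    public static void TraverseTreeParallel(string root, Action<string> action)
    {
        var files = Directory.EnumerateFiles(root, "*", SearchOption.AllDirectories);
        Parallel.ForEach(files, file => action(file));
    }

    public static void Main()
    {
        // Example action: print each file path (the directory here is made up).
        TraverseTreeParallel("config/logs", Console.WriteLine);
    }
}
```

Parallel.ForEach partitions the enumerated files across worker threads, which matches the comment's intent of using as many cores as the underlying machine has.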