Optimize log loading and cleanup: add streaming log reads and a scheduled log cleanup service
All checks were successful
Docker Build & Deploy / Build Docker Image (push) Successful in 24s
Docker Build & Deploy / Deploy to Production (push) Successful in 6s

孙诚
2025-12-30 11:07:14 +08:00
parent 0f52806569
commit 8ba279e957
5 changed files with 287 additions and 39 deletions


@@ -0,0 +1,106 @@
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;

namespace Service;
/// <summary>
/// Background service that periodically cleans up old log files
/// </summary>
public class LogCleanupService(ILogger<LogCleanupService> logger) : BackgroundService
{
    private readonly TimeSpan _checkInterval = TimeSpan.FromHours(24); // check once every 24 hours
    private const int RetentionDays = 30; // keep 30 days of logs

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        logger.LogInformation("Log cleanup service started");

        // Run one cleanup immediately on startup
        await CleanupOldLogsAsync();

        // Then clean up on a fixed interval
        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await Task.Delay(_checkInterval, stoppingToken);
                await CleanupOldLogsAsync();
            }
            catch (OperationCanceledException)
            {
                // The service is shutting down
                break;
            }
            catch (Exception ex)
            {
                logger.LogError(ex, "An error occurred while cleaning up logs");
            }
        }

        logger.LogInformation("Log cleanup service stopped");
    }

    /// <summary>
    /// Delete log files older than the retention window
    /// </summary>
    private async Task CleanupOldLogsAsync()
    {
        await Task.Run(() =>
        {
            try
            {
                var logDirectory = Path.Combine(Directory.GetCurrentDirectory(), "logs");
                if (!Directory.Exists(logDirectory))
                {
                    logger.LogWarning("Log directory does not exist: {LogDirectory}", logDirectory);
                    return;
                }

                var cutoffDate = DateTime.Now.AddDays(-RetentionDays);
                var logFiles = Directory.GetFiles(logDirectory, "log-*.txt");
                var deletedCount = 0;

                foreach (var logFile in logFiles)
                {
                    try
                    {
                        var fileName = Path.GetFileNameWithoutExtension(logFile);
                        var dateStr = fileName.Replace("log-", "");

                        // Try to parse the date from the file name (format: yyyyMMdd)
                        if (DateTime.TryParseExact(dateStr, "yyyyMMdd",
                            System.Globalization.CultureInfo.InvariantCulture,
                            System.Globalization.DateTimeStyles.None,
                            out var logDate))
                        {
                            if (logDate < cutoffDate)
                            {
                                File.Delete(logFile);
                                deletedCount++;
                                logger.LogInformation("Deleted expired log file: {LogFile} (date: {LogDate})",
                                    Path.GetFileName(logFile), logDate.ToString("yyyy-MM-dd"));
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        logger.LogError(ex, "Failed to delete log file: {LogFile}", logFile);
                    }
                }

                if (deletedCount > 0)
                {
                    logger.LogInformation("Log cleanup finished: deleted {DeletedCount} expired log file(s) (retention: {RetentionDays} days)",
                        deletedCount, RetentionDays);
                }
                else
                {
                    logger.LogDebug("No expired log files to clean up");
                }
            }
            catch (Exception ex)
            {
                logger.LogError(ex, "An error occurred during log cleanup");
            }
        });
    }
}


@@ -185,10 +185,17 @@ const loadLogs = async (reset = false) => {
      total.value = response.total
      // Decide whether there is more data to load
      if (logList.value.length >= total.value || newLogs.length < pageSize.value) {
        finished.value = true
      // total = -1 means the total is unknown; in that case judge only by how much data came back
      if (total.value === -1) {
        // Fewer rows than requested means there is nothing more to load
        finished.value = newLogs.length < pageSize.value
      } else {
        finished.value = false
        // With an exact total, check whether all of the data has been loaded
        if (logList.value.length >= total.value || newLogs.length < pageSize.value) {
          finished.value = true
        } else {
          finished.value = false
        }
      }
    } else {
      showToast(response.message || 'Failed to fetch logs')


@@ -56,41 +56,15 @@ public class LogController(ILogger<LogController> logger) : ControllerBase
            };
        }
        // Read all log lines (shared read mode so other processes can keep writing)
        var allLines = await ReadAllLinesAsync(logFilePath);
        var logEntries = new List<LogEntry>();
        // Stream the log file (filter while reading and stop once enough entries have been collected)
        var (logEntries, total) = await ReadLogsStreamAsync(
            logFilePath,
            pageIndex,
            pageSize,
            searchKeyword,
            logLevel);
        foreach (var line in allLines)
        {
            if (string.IsNullOrWhiteSpace(line))
                continue;
            var logEntry = ParseLogLine(line);
            if (logEntry != null)
            {
                // Apply the filter conditions
                if (!string.IsNullOrEmpty(searchKeyword) &&
                    !logEntry.Message.Contains(searchKeyword, StringComparison.OrdinalIgnoreCase))
                {
                    continue;
                }
                if (!string.IsNullOrEmpty(logLevel) &&
                    !logEntry.Level.Equals(logLevel, StringComparison.OrdinalIgnoreCase))
                {
                    continue;
                }
                logEntries.Add(logEntry);
            }
        }
        // Reverse so the newest entries come first
        logEntries.Reverse();
        var total = logEntries.Count;
        var skip = (pageIndex - 1) * pageSize;
        var pagedData = logEntries.Skip(skip).Take(pageSize).ToList();
        var pagedData = logEntries;
        return new PagedResponse<LogEntry>
        {
@@ -143,6 +117,55 @@ public class LogController(ILogger<LogController> logger) : ControllerBase
        }
    }
    /// <summary>
    /// Merge multi-line log entries (deprecated; this is now handled by the streaming reader)
    /// </summary>
    [Obsolete("Use ReadLogsStreamAsync instead")]
    private List<string> MergeMultiLineLog(string[] lines)
    {
        var mergedLines = new List<string>();
        var currentLog = new System.Text.StringBuilder();
        // Regular expression matching the start of a log entry
        var logStartPattern = new System.Text.RegularExpressions.Regex(
            @"^\[\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [+-]\d{2}:\d{2}\]"
        );
        foreach (var line in lines)
        {
            if (string.IsNullOrWhiteSpace(line))
                continue;
            // Check whether this line starts a new log entry
            if (logStartPattern.IsMatch(line))
            {
                // Save the previous entry
                if (currentLog.Length > 0)
                {
                    mergedLines.Add(currentLog.ToString());
                    currentLog.Clear();
                }
                currentLog.Append(line);
            }
            else
            {
                // Continuation of the previous entry; append it after a newline
                if (currentLog.Length > 0)
                {
                    currentLog.Append('\n').Append(line);
                }
            }
        }
        // Add the last entry
        if (currentLog.Length > 0)
        {
            mergedLines.Add(currentLog.ToString());
        }
        return mergedLines;
    }
    /// <summary>
    /// Parse a single log line
    /// </summary>
@@ -181,6 +204,114 @@ public class LogController(ILogger<LogController> logger) : ControllerBase
        }
    }
    /// <summary>
    /// Read logs as a stream (truly streaming: only the data that is needed is read, and reading stops as soon as enough has been collected)
    /// </summary>
    private async Task<(List<LogEntry> entries, int total)> ReadLogsStreamAsync(
        string path,
        int pageIndex,
        int pageSize,
        string? searchKeyword,
        string? logLevel)
    {
        var filteredEntries = new List<LogEntry>();
        var currentLog = new System.Text.StringBuilder();
        var logStartPattern = new System.Text.RegularExpressions.Regex(
            @"^\[\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [+-]\d{2}:\d{2}\]");
        // Compute the maximum number of entries to read so there is enough data for newest-first paging:
        // since logs are shown newest first, read enough entries to cover the current page
        var maxEntriesToRead = pageIndex * pageSize + pageSize; // read one extra page to tell whether there is a next page
        using var fileStream = new FileStream(
            path,
            FileMode.Open,
            FileAccess.Read,
            FileShare.ReadWrite);
        using var streamReader = new StreamReader(fileStream);
        string? line;
        var readCount = 0;
        while ((line = await streamReader.ReadLineAsync()) != null)
        {
            if (string.IsNullOrWhiteSpace(line))
                continue;
            // Check whether this line starts a new log entry
            if (logStartPattern.IsMatch(line))
            {
                // Process the entry accumulated so far
                if (currentLog.Length > 0)
                {
                    var logEntry = ParseLogLine(currentLog.ToString());
                    if (logEntry != null && PassFilter(logEntry, searchKeyword, logLevel))
                    {
                        filteredEntries.Add(logEntry);
                        readCount++;
                        // Exit early once enough data has been read
                        if (readCount >= maxEntriesToRead)
                        {
                            break;
                        }
                    }
                    currentLog.Clear();
                }
                currentLog.Append(line);
            }
            else
            {
                // Continuation of the previous entry
                if (currentLog.Length > 0)
                {
                    currentLog.Append('\n').Append(line);
                }
            }
        }
        // Handle the last entry (if the loop ended normally or stopped exactly on an entry boundary)
        if (currentLog.Length > 0 && readCount < maxEntriesToRead)
        {
            var logEntry = ParseLogLine(currentLog.ToString());
            if (logEntry != null && PassFilter(logEntry, searchKeyword, logLevel))
            {
                filteredEntries.Add(logEntry);
            }
        }
        // Reverse so the newest entries come first
        filteredEntries.Reverse();
        // Apply paging
        var skip = (pageIndex - 1) * pageSize;
        var pagedData = filteredEntries.Skip(skip).Take(pageSize).ToList();
        // Return -1 for total to mean "unknown" (avoids scanning the whole file);
        // the frontend can tell whether there is a next page from how much data came back
        return (pagedData, -1);
    }
    /// <summary>
    /// Check whether a log entry passes the filter conditions
    /// </summary>
    private bool PassFilter(LogEntry logEntry, string? searchKeyword, string? logLevel)
    {
        if (!string.IsNullOrEmpty(searchKeyword) &&
            !logEntry.Message.Contains(searchKeyword, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }
        if (!string.IsNullOrEmpty(logLevel) &&
            !logEntry.Level.Equals(logLevel, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }
        return true;
    }
    /// <summary>
    /// Read all lines of a file (with shared read access)
    /// </summary>


@@ -87,7 +87,7 @@ var fsql = new FreeSqlBuilder()
    .UseMonitorCommand(
        cmd =>
        {
            Log.Information("Executing SQL: {Sql}", cmd.CommandText);
            Log.Debug("Executing SQL: {Sql}", cmd.CommandText);
        }
    )
    .Build();
@@ -97,6 +97,9 @@ builder.Services.AddSingleton(fsql);
// Auto-scan and register services and repositories
builder.Services.AddServices();
// Register the log cleanup background service
builder.Services.AddHostedService<LogCleanupService>();
// Configure Quartz.NET scheduled jobs
builder.AddScheduler();


@@ -11,7 +11,7 @@
  },
  "Serilog": {
    "MinimumLevel": {
      "Default": "Information",
      "Default": "Debug",
      "Override": {
        "Microsoft": "Warning",
        "Microsoft.EntityFrameworkCore": "Warning"
@@ -26,6 +26,7 @@
        "Args": {
          "path": "logs/log-.txt",
          "rollingInterval": "Day",
          "retainedFileCountLimit": 30,
          "outputTemplate": "[{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz}] [{Level:u3}] {Message:lj}{NewLine}{Exception}"
        }
      }
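The two configuration hunks above lower the default Serilog level to Debug and cap the rolling file sink at 30 retained files. For illustration only, the same sink settings expressed through Serilog's fluent API would look roughly like the sketch below; the project presumably builds its logger from this configuration section rather than from code, so treat this bootstrap as an assumption, not the app's actual startup:

using Serilog;
using Serilog.Events;

// Equivalent fluent-API form of the JSON configuration above (illustration only).
Log.Logger = new LoggerConfiguration()
    .MinimumLevel.Debug()                              // "Default": "Debug"
    .MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
    .MinimumLevel.Override("Microsoft.EntityFrameworkCore", LogEventLevel.Warning)
    .WriteTo.File(
        "logs/log-.txt",
        rollingInterval: RollingInterval.Day,          // one file per day
        retainedFileCountLimit: 30,                    // Serilog itself keeps only the most recent 30 files
        outputTemplate: "[{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz}] [{Level:u3}] {Message:lj}{NewLine}{Exception}")
    .CreateLogger();

With RollingInterval.Day and the path "logs/log-.txt", Serilog names the files log-yyyyMMdd.txt, which is exactly the pattern LogCleanupService parses; its 30-day retention therefore overlaps with the retainedFileCountLimit of 30 set here.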