chore: sync local changes (2026-03-12)

This commit is contained in:
2026-03-12 18:58:53 +08:00
parent 74f15c282e
commit d861a9937b
38 changed files with 3570 additions and 2926 deletions

View File

@@ -1,302 +1,418 @@
package services
import (
	"log"
	"sort"
	"sync"
	"time"

	"mengyaping-backend/config"
	"mengyaping-backend/models"
	"mengyaping-backend/storage"
	"mengyaping-backend/utils"
)
// MonitorService is the background availability-monitoring service.
// It periodically probes every stored website over HTTP and records
// the results through the storage layer.
type MonitorService struct {
	httpClient *utils.HTTPClient // HTTP client used for the availability probes
	storage    *storage.Storage  // persistence backend for websites and check records
	stopCh     chan struct{}     // closed by Stop to make the Start loop exit
	running    bool              // true while the Start loop is active; guarded by mu
	mu         sync.Mutex        // protects running and the start/stop transition
}
// Package-level singleton state for the monitor service.
var (
	monitorService *MonitorService // process-wide instance; set once in GetMonitorService
	monitorOnce    sync.Once       // guards the one-time initialization
)
// GetMonitorService returns the process-wide MonitorService singleton,
// constructing it on first use from the global configuration.
func GetMonitorService() *MonitorService {
	monitorOnce.Do(func() {
		conf := config.GetConfig()
		svc := &MonitorService{
			stopCh:     make(chan struct{}),
			storage:    storage.GetStorage(),
			httpClient: utils.NewHTTPClient(conf.Monitor.Timeout),
		}
		monitorService = svc
	})
	return monitorService
}
// Start runs the monitoring loop in the calling goroutine: it performs
// one immediate round of checks, then one round per configured interval,
// until Stop is called. Calling Start while already running is a no-op.
func (s *MonitorService) Start() {
	s.mu.Lock()
	if s.running {
		s.mu.Unlock()
		return
	}
	s.running = true
	// Recreate the stop channel on every (re)start: Stop closes it, so
	// reusing the old, closed channel would make a restarted loop exit
	// immediately. A local copy is kept so the select below does not
	// read s.stopCh without holding the lock.
	s.stopCh = make(chan struct{})
	stopCh := s.stopCh
	s.mu.Unlock()
	log.Println("监控服务已启动")
	// Immediate first round of checks.
	go s.checkAll()
	// Periodic rounds. NOTE: each tick launches checkAll in its own
	// goroutine, so rounds that outlast the interval may overlap.
	cfg := config.GetConfig()
	ticker := time.NewTicker(cfg.Monitor.Interval)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			go s.checkAll()
		case <-stopCh:
			log.Println("监控服务已停止")
			return
		}
	}
}
// Stop signals the monitoring loop to exit. It is safe to call
// multiple times; only the first call while running has any effect.
func (s *MonitorService) Stop() {
	s.mu.Lock()
	defer s.mu.Unlock()
	if !s.running {
		return
	}
	s.running = false
	close(s.stopCh)
}
// checkAll probes every URL of every website concurrently, capped at 10
// in-flight checks, then persists all results once the round finishes.
func (s *MonitorService) checkAll() {
	sites := s.storage.GetWebsites()
	limiter := make(chan struct{}, 10) // concurrency cap
	var wg sync.WaitGroup
	for _, site := range sites {
		for _, info := range site.URLs {
			wg.Add(1)
			go func(w models.Website, u models.URLInfo) {
				defer wg.Done()
				limiter <- struct{}{}
				defer func() { <-limiter }()
				s.checkURL(w, u)
			}(site, info)
		}
	}
	wg.Wait()
	// Persist the records collected during this round.
	s.storage.SaveAll()
}
// checkURL probes a single URL, stores the resulting monitor record, and
// refreshes the website's stored title/favicon when the probe found them.
func (s *MonitorService) checkURL(website models.Website, urlInfo models.URLInfo) {
	res := s.httpClient.CheckWebsite(urlInfo.URL)
	rec := models.MonitorRecord{
		WebsiteID:  website.ID,
		URLID:      urlInfo.ID,
		URL:        urlInfo.URL,
		StatusCode: res.StatusCode,
		Latency:    res.Latency.Milliseconds(),
		IsUp:       res.Error == nil && utils.IsSuccessStatus(res.StatusCode),
		CheckedAt:  time.Now(),
	}
	if res.Error != nil {
		rec.Error = res.Error.Error()
	}
	s.storage.AddRecord(rec)
	// Update stored metadata (title/favicon) only when the probe
	// discovered a value that differs from what is already stored.
	if res.Title != "" || res.Favicon != "" {
		if stored := s.storage.GetWebsite(website.ID); stored != nil {
			dirty := false
			if res.Title != "" && stored.Title != res.Title {
				stored.Title = res.Title
				dirty = true
			}
			if res.Favicon != "" && stored.Favicon != res.Favicon {
				stored.Favicon = res.Favicon
				dirty = true
			}
			if dirty {
				stored.UpdatedAt = time.Now()
				s.storage.UpdateWebsite(*stored)
			}
		}
	}
	log.Printf("检测 [%s] %s - 状态码: %d, 延迟: %dms, 可用: %v",
		website.Name, urlInfo.URL, res.StatusCode, res.Latency.Milliseconds(), rec.IsUp)
}
// CheckWebsiteNow immediately checks every URL of the given website in
// parallel, waits for all checks to finish, and persists the results.
// It is a no-op if the website ID is unknown.
func (s *MonitorService) CheckWebsiteNow(websiteID string) {
	website := s.storage.GetWebsite(websiteID)
	if website == nil {
		return
	}
	var wg sync.WaitGroup
	for _, urlInfo := range website.URLs {
		wg.Add(1)
		go func(u models.URLInfo) {
			defer wg.Done()
			s.checkURL(*website, u)
		}(urlInfo)
	}
	// Wait so the goroutines have a known lifetime and the fresh
	// records are persisted before we return (previously they were
	// fire-and-forget and only saved by the next scheduled round).
	wg.Wait()
	s.storage.SaveAll()
}
// GetWebsiteStatus builds the aggregate status view for one website:
// per-URL statuses, averaged 24h/7d uptime percentages, and the most
// recent check time. Returns nil when the website ID is unknown.
func (s *MonitorService) GetWebsiteStatus(websiteID string) *models.WebsiteStatus {
	site := s.storage.GetWebsite(websiteID)
	if site == nil {
		return nil
	}
	now := time.Now()
	cutoff24h := now.Add(-24 * time.Hour)
	cutoff7d := now.Add(-7 * 24 * time.Hour)
	status := &models.WebsiteStatus{
		Website:     *site,
		URLStatuses: []models.URLStatus{},
	}
	var sum24, sum7 float64
	for _, info := range site.URLs {
		us := s.getURLStatus(site.ID, info, cutoff24h, cutoff7d)
		status.URLStatuses = append(status.URLStatuses, us)
		sum24 += us.Uptime24h
		sum7 += us.Uptime7d
	}
	// Website-level uptime is the plain average over its URLs.
	if n := len(status.URLStatuses); n > 0 {
		status.Uptime24h = sum24 / float64(n)
		status.Uptime7d = sum7 / float64(n)
	}
	// LastChecked is the newest CheckedAt across all URL states.
	for _, us := range status.URLStatuses {
		if us.CurrentState.CheckedAt.After(status.LastChecked) {
			status.LastChecked = us.CurrentState.CheckedAt
		}
	}
	return status
}
// getURLStatus assembles the status of a single URL: its latest check,
// raw 24h history with uptime percentage and mean latency, and an
// hourly-aggregated 7d history with uptime percentage.
func (s *MonitorService) getURLStatus(websiteID string, urlInfo models.URLInfo, since24h, since7d time.Time) models.URLStatus {
	out := models.URLStatus{URLInfo: urlInfo}
	// Most recent check result, if one exists.
	if last := s.storage.GetLatestRecord(websiteID, urlInfo.ID); last != nil {
		out.CurrentState = *last
	}
	// 24-hour window: keep raw records, derive uptime % and mean latency.
	recent := s.storage.GetRecords(websiteID, urlInfo.ID, since24h)
	out.History24h = recent
	if n := len(recent); n > 0 {
		up := 0
		var latencySum int64
		for _, rec := range recent {
			if rec.IsUp {
				up++
			}
			latencySum += rec.Latency
		}
		out.Uptime24h = float64(up) / float64(n) * 100
		out.AvgLatency = latencySum / int64(n)
	}
	// 7-day window: hourly aggregation plus overall uptime %.
	weekly := s.storage.GetRecords(websiteID, urlInfo.ID, since7d)
	out.History7d = s.aggregateByHour(weekly)
	if n := len(weekly); n > 0 {
		up := 0
		for _, rec := range weekly {
			if rec.IsUp {
				up++
			}
		}
		out.Uptime7d = float64(up) / float64(n) * 100
	}
	return out
}
// aggregateByHour groups records into hourly buckets and computes, per
// bucket, the total/up counts, uptime percentage and mean latency.
// The result is sorted chronologically so callers receive deterministic
// output (Go map iteration order is random).
func (s *MonitorService) aggregateByHour(records []models.MonitorRecord) []models.HourlyStats {
	buckets := make(map[string]*models.HourlyStats)
	for _, rec := range records {
		hour := rec.CheckedAt.Truncate(time.Hour)
		key := hour.Format(time.RFC3339)
		st, ok := buckets[key]
		if !ok {
			st = &models.HourlyStats{Hour: hour}
			buckets[key] = st
		}
		st.TotalCount++
		if rec.IsUp {
			st.UpCount++
		}
		st.AvgLatency += rec.Latency // running sum; averaged below
	}
	var result []models.HourlyStats
	for _, st := range buckets {
		if st.TotalCount > 0 {
			st.AvgLatency /= int64(st.TotalCount)
			st.Uptime = float64(st.UpCount) / float64(st.TotalCount) * 100
		}
		result = append(result, *st)
	}
	// Deterministic, chronological ordering.
	sort.Slice(result, func(i, j int) bool {
		return result[i].Hour.Before(result[j].Hour)
	})
	return result
}
// GetAllWebsiteStatuses returns the status view of every stored website,
// skipping any that disappear between listing and lookup.
func (s *MonitorService) GetAllWebsiteStatuses() []models.WebsiteStatus {
	var statuses []models.WebsiteStatus
	for _, site := range s.storage.GetWebsites() {
		if st := s.GetWebsiteStatus(site.ID); st != nil {
			statuses = append(statuses, *st)
		}
	}
	return statuses
}
package services
import (
"log"
"sort"
"sync"
"time"
"mengyaping-backend/config"
"mengyaping-backend/models"
"mengyaping-backend/storage"
"mengyaping-backend/utils"
)
// MonitorService is the background availability-monitoring service.
// It periodically probes every stored website over HTTP, refreshes DNS
// information, and records results through the storage layer.
type MonitorService struct {
	httpClient *utils.HTTPClient // HTTP client used for the availability probes
	storage    *storage.Storage  // persistence backend for websites and check records
	stopCh     chan struct{}     // closed by Stop to make the Start loop exit
	running    bool              // true while the Start loop is active; guarded by mu
	mu         sync.Mutex        // protects running and the start/stop transition
}
// Package-level singleton state for the monitor service.
var (
	monitorService *MonitorService // process-wide instance; set once in GetMonitorService
	monitorOnce    sync.Once       // guards the one-time initialization
)
// GetMonitorService returns the process-wide MonitorService singleton,
// constructing it on first use from the global configuration.
func GetMonitorService() *MonitorService {
	monitorOnce.Do(func() {
		conf := config.GetConfig()
		svc := &MonitorService{
			stopCh:     make(chan struct{}),
			storage:    storage.GetStorage(),
			httpClient: utils.NewHTTPClient(conf.Monitor.Timeout),
		}
		monitorService = svc
	})
	return monitorService
}
// Start runs the monitoring loop in the calling goroutine: it performs
// one immediate round of checks, then one round per configured interval,
// until Stop is called. Calling Start while already running is a no-op.
func (s *MonitorService) Start() {
	s.mu.Lock()
	if s.running {
		s.mu.Unlock()
		return
	}
	s.running = true
	// Recreate the stop channel on every (re)start: Stop closes it, so
	// reusing the old, closed channel would make a restarted loop exit
	// immediately. A local copy is kept so the select below does not
	// read s.stopCh without holding the lock.
	s.stopCh = make(chan struct{})
	stopCh := s.stopCh
	s.mu.Unlock()
	log.Println("监控服务已启动")
	// Immediate first round of checks.
	go s.checkAll()
	// Periodic rounds. NOTE: each tick launches checkAll in its own
	// goroutine, so rounds that outlast the interval may overlap.
	cfg := config.GetConfig()
	ticker := time.NewTicker(cfg.Monitor.Interval)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			go s.checkAll()
		case <-stopCh:
			log.Println("监控服务已停止")
			return
		}
	}
}
// Stop signals the monitoring loop to exit. It is safe to call
// multiple times; only the first call while running has any effect.
func (s *MonitorService) Stop() {
	s.mu.Lock()
	defer s.mu.Unlock()
	if !s.running {
		return
	}
	s.running = false
	close(s.stopCh)
}
// checkAll runs one full monitoring round: websites are probed one at a
// time, spaced one second apart to spread the load, with at most three
// concurrent URL checks in flight. Afterwards it refreshes DNS records
// for every website and persists all collected results.
func (s *MonitorService) checkAll() {
	sites := s.storage.GetWebsites()
	limiter := make(chan struct{}, 3) // global cap of 3 concurrent checks
	for idx, site := range sites {
		// Stagger websites one second apart to avoid a burst.
		if idx > 0 {
			time.Sleep(1 * time.Second)
		}
		var wg sync.WaitGroup
		for _, info := range site.URLs {
			wg.Add(1)
			go func(w models.Website, u models.URLInfo) {
				defer wg.Done()
				limiter <- struct{}{}
				defer func() { <-limiter }()
				s.checkURL(w, u)
			}(site, info)
		}
		wg.Wait()
	}
	// With status checks done, resolve DNS site by site.
	s.resolveAllWebsiteIPs(sites)
	// Persist everything collected during this round.
	s.storage.SaveAll()
	log.Printf("本轮检测完成,共 %d 个网站", len(sites))
}
// resolveAllWebsiteIPs refreshes the DNS records of every website that
// has at least one URL, pausing 500ms between lookups to spread the load.
func (s *MonitorService) resolveAllWebsiteIPs(websites []models.Website) {
	for idx, site := range websites {
		if len(site.URLs) == 0 {
			continue // nothing to resolve
		}
		if idx > 0 {
			time.Sleep(500 * time.Millisecond)
		}
		s.resolveWebsiteIP(site)
	}
}
// resolveWebsiteIP resolves the domain of the website's first URL and,
// when the lookup yields addresses, stores them on the website record.
func (s *MonitorService) resolveWebsiteIP(website models.Website) {
	if len(website.URLs) == 0 {
		return
	}
	addrs, err := utils.ResolveDomainIPs(website.URLs[0].URL)
	if err != nil {
		log.Printf("DNS解析失败 [%s]: %v", website.Name, err)
		return
	}
	if len(addrs) == 0 {
		return // keep any previously stored addresses
	}
	stored := s.storage.GetWebsite(website.ID)
	if stored == nil {
		return // website was removed meanwhile
	}
	stored.IPAddresses = addrs
	stored.UpdatedAt = time.Now()
	s.storage.UpdateWebsite(*stored)
	log.Printf("DNS解析 [%s] → %v", website.Name, addrs)
}
// checkURL probes a single URL with retries (linear backoff: 2s, 4s, ...),
// stores the resulting monitor record, and — only when the site is up and
// still has no stored title — performs one full fetch to capture metadata.
func (s *MonitorService) checkURL(website models.Website, urlInfo models.URLInfo) {
	conf := config.GetConfig()
	retries := conf.Monitor.RetryCount
	var res utils.CheckResult
	for try := 0; try <= retries; try++ {
		if try > 0 {
			// Back off before each retry: 2s, 4s, 6s, ...
			time.Sleep(time.Duration(try) * 2 * time.Second)
			log.Printf("重试 [%s] %s - 第 %d 次重试", website.Name, urlInfo.URL, try)
		}
		res = s.httpClient.CheckWebsiteStatus(urlInfo.URL)
		if res.Error == nil && utils.IsSuccessStatus(res.StatusCode) {
			break // success — stop retrying
		}
	}
	rec := models.MonitorRecord{
		WebsiteID:  website.ID,
		URLID:      urlInfo.ID,
		URL:        urlInfo.URL,
		StatusCode: res.StatusCode,
		Latency:    res.Latency.Milliseconds(),
		IsUp:       res.Error == nil && utils.IsSuccessStatus(res.StatusCode),
		CheckedAt:  time.Now(),
	}
	if res.Error != nil {
		rec.Error = res.Error.Error()
	}
	s.storage.AddRecord(rec)
	// Metadata fetch is the expensive path; run it only when needed.
	if rec.IsUp && website.Title == "" {
		full := s.httpClient.CheckWebsite(urlInfo.URL)
		if full.Title != "" {
			if stored := s.storage.GetWebsite(website.ID); stored != nil {
				stored.Title = full.Title
				stored.UpdatedAt = time.Now()
				s.storage.UpdateWebsite(*stored)
			}
		}
	}
	log.Printf("检测 [%s] %s - 状态码: %d, 延迟: %dms, 可用: %v",
		website.Name, urlInfo.URL, res.StatusCode, res.Latency.Milliseconds(), rec.IsUp)
}
// CheckWebsiteNow synchronously checks every URL of the given website,
// refreshes its DNS record, and persists the results before returning.
// It is a no-op if the website ID is unknown.
func (s *MonitorService) CheckWebsiteNow(websiteID string) {
	site := s.storage.GetWebsite(websiteID)
	if site == nil {
		return
	}
	// Check each URL one after another.
	for _, info := range site.URLs {
		s.checkURL(*site, info)
	}
	// Refresh DNS and save everything collected above.
	s.resolveWebsiteIP(*site)
	s.storage.SaveAll()
}
// GetWebsiteStatus builds the aggregate status view for one website:
// per-URL statuses, averaged 24h/7d uptime, a 90-day daily history with
// its overall uptime, and the most recent check time. Returns nil when
// the website ID is unknown.
func (s *MonitorService) GetWebsiteStatus(websiteID string) *models.WebsiteStatus {
	site := s.storage.GetWebsite(websiteID)
	if site == nil {
		return nil
	}
	now := time.Now()
	cutoff24h := now.Add(-24 * time.Hour)
	cutoff7d := now.Add(-7 * 24 * time.Hour)
	cutoff90d := now.Add(-90 * 24 * time.Hour)
	status := &models.WebsiteStatus{
		Website:     *site,
		URLStatuses: []models.URLStatus{},
	}
	var sum24, sum7 float64
	var all90 []models.MonitorRecord
	for _, info := range site.URLs {
		us := s.getURLStatus(site.ID, info, cutoff24h, cutoff7d)
		status.URLStatuses = append(status.URLStatuses, us)
		sum24 += us.Uptime24h
		sum7 += us.Uptime7d
		// Collect every URL's 90-day records for site-wide statistics.
		all90 = append(all90, s.storage.GetRecords(site.ID, info.ID, cutoff90d)...)
	}
	// Website-level uptime is the plain average over its URLs.
	if n := len(status.URLStatuses); n > 0 {
		status.Uptime24h = sum24 / float64(n)
		status.Uptime7d = sum7 / float64(n)
	}
	// 90-day view: per-day buckets plus overall uptime across all records.
	status.DailyHistory = s.aggregateByDay(all90)
	if len(all90) > 0 {
		up := 0
		for _, rec := range all90 {
			if rec.IsUp {
				up++
			}
		}
		status.Uptime90d = float64(up) / float64(len(all90)) * 100
	}
	// LastChecked is the newest CheckedAt across all URL states.
	for _, us := range status.URLStatuses {
		if us.CurrentState.CheckedAt.After(status.LastChecked) {
			status.LastChecked = us.CurrentState.CheckedAt
		}
	}
	return status
}
// getURLStatus assembles the status of a single URL: its latest check,
// raw 24h history with uptime percentage and mean latency, and an
// hourly-aggregated 7d history with uptime percentage.
func (s *MonitorService) getURLStatus(websiteID string, urlInfo models.URLInfo, since24h, since7d time.Time) models.URLStatus {
	out := models.URLStatus{URLInfo: urlInfo}
	// Most recent check result, if one exists.
	if last := s.storage.GetLatestRecord(websiteID, urlInfo.ID); last != nil {
		out.CurrentState = *last
	}
	// 24-hour window: keep raw records, derive uptime % and mean latency.
	recent := s.storage.GetRecords(websiteID, urlInfo.ID, since24h)
	out.History24h = recent
	if n := len(recent); n > 0 {
		up := 0
		var latencySum int64
		for _, rec := range recent {
			if rec.IsUp {
				up++
			}
			latencySum += rec.Latency
		}
		out.Uptime24h = float64(up) / float64(n) * 100
		out.AvgLatency = latencySum / int64(n)
	}
	// 7-day window: hourly aggregation plus overall uptime %.
	weekly := s.storage.GetRecords(websiteID, urlInfo.ID, since7d)
	out.History7d = s.aggregateByHour(weekly)
	if n := len(weekly); n > 0 {
		up := 0
		for _, rec := range weekly {
			if rec.IsUp {
				up++
			}
		}
		out.Uptime7d = float64(up) / float64(n) * 100
	}
	return out
}
// aggregateByHour groups records into hourly buckets and computes, per
// bucket, the total/up counts, uptime percentage and mean latency.
// The result is sorted chronologically — map iteration order is random,
// and aggregateByDay already sorts its output, so unsorted hourly data
// was both non-deterministic and inconsistent with the daily view.
func (s *MonitorService) aggregateByHour(records []models.MonitorRecord) []models.HourlyStats {
	buckets := make(map[string]*models.HourlyStats)
	for _, rec := range records {
		hour := rec.CheckedAt.Truncate(time.Hour)
		key := hour.Format(time.RFC3339)
		st, ok := buckets[key]
		if !ok {
			st = &models.HourlyStats{Hour: hour}
			buckets[key] = st
		}
		st.TotalCount++
		if rec.IsUp {
			st.UpCount++
		}
		st.AvgLatency += rec.Latency // running sum; averaged below
	}
	var result []models.HourlyStats
	for _, st := range buckets {
		if st.TotalCount > 0 {
			st.AvgLatency /= int64(st.TotalCount)
			st.Uptime = float64(st.UpCount) / float64(st.TotalCount) * 100
		}
		result = append(result, *st)
	}
	// Deterministic, chronological ordering.
	sort.Slice(result, func(i, j int) bool {
		return result[i].Hour.Before(result[j].Hour)
	})
	return result
}
// aggregateByDay groups records into per-day buckets (midnight in each
// record's own location), computing total/up counts, uptime percentage
// and mean latency, and returns them in chronological order.
func (s *MonitorService) aggregateByDay(records []models.MonitorRecord) []models.DailyStats {
	buckets := make(map[string]*models.DailyStats)
	for _, rec := range records {
		t := rec.CheckedAt
		day := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
		key := day.Format("2006-01-02")
		st, ok := buckets[key]
		if !ok {
			st = &models.DailyStats{Date: day}
			buckets[key] = st
		}
		st.TotalCount++
		if rec.IsUp {
			st.UpCount++
		}
		st.AvgLatency += rec.Latency // running sum; averaged below
	}
	out := make([]models.DailyStats, 0, len(buckets))
	for _, st := range buckets {
		if st.TotalCount > 0 {
			st.AvgLatency /= int64(st.TotalCount)
			st.Uptime = float64(st.UpCount) / float64(st.TotalCount) * 100
		}
		out = append(out, *st)
	}
	// Oldest day first.
	sort.Slice(out, func(i, j int) bool {
		return out[i].Date.Before(out[j].Date)
	})
	return out
}
// GetAllWebsiteStatuses returns the status view of every stored website,
// skipping any that disappear between listing and lookup.
func (s *MonitorService) GetAllWebsiteStatuses() []models.WebsiteStatus {
	var statuses []models.WebsiteStatus
	for _, site := range s.storage.GetWebsites() {
		if st := s.GetWebsiteStatus(site.ID); st != nil {
			statuses = append(statuses, *st)
		}
	}
	return statuses
}