package services

import (
	"log"
	"sort"
	"sync"
	"time"

	"mengyaping-backend/config"
	"mengyaping-backend/models"
	"mengyaping-backend/storage"
	"mengyaping-backend/utils"
)

// MonitorService periodically checks the availability of configured
// websites, records the results, and exposes aggregated status views.
type MonitorService struct {
	httpClient *utils.HTTPClient
	storage    *storage.Storage
	stopCh     chan struct{} // closed by Stop to terminate the Start loop
	running    bool
	mu         sync.Mutex // guards stopCh and running
}

var (
	monitorService *MonitorService
	monitorOnce    sync.Once
)

// GetMonitorService returns the process-wide monitor service singleton,
// lazily constructing it from the global config and storage on first call.
func GetMonitorService() *MonitorService {
	monitorOnce.Do(func() {
		cfg := config.GetConfig()
		monitorService = &MonitorService{
			httpClient: utils.NewHTTPClient(cfg.Monitor.Timeout),
			storage:    storage.GetStorage(),
			stopCh:     make(chan struct{}),
		}
	})
	return monitorService
}

// Start runs the monitoring loop in the calling goroutine: it performs an
// immediate check of all websites, then re-checks on every tick of the
// configured interval until Stop is called. Calling Start while already
// running is a no-op.
func (s *MonitorService) Start() {
	s.mu.Lock()
	if s.running {
		s.mu.Unlock()
		return
	}
	// Recreate the stop channel so the service can be restarted after a
	// previous Stop: a closed channel would make the select below return
	// immediately. Keep a local copy so the loop never races with a later
	// reassignment of s.stopCh.
	s.stopCh = make(chan struct{})
	stopCh := s.stopCh
	s.running = true
	s.mu.Unlock()

	log.Println("监控服务已启动")

	// Run the first pass right away instead of waiting a full interval.
	go s.checkAll()

	cfg := config.GetConfig()
	ticker := time.NewTicker(cfg.Monitor.Interval)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			go s.checkAll()
		case <-stopCh:
			log.Println("监控服务已停止")
			return
		}
	}
}

// Stop signals the monitoring loop to exit. Safe to call when not running.
func (s *MonitorService) Stop() {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.running {
		close(s.stopCh)
		s.running = false
	}
}

// checkAll checks every URL of every website concurrently (bounded by a
// semaphore), waits for all checks to finish, then persists the records.
func (s *MonitorService) checkAll() {
	websites := s.storage.GetWebsites()

	var wg sync.WaitGroup
	semaphore := make(chan struct{}, 10) // cap on concurrent checks

	for _, website := range websites {
		for _, urlInfo := range website.URLs {
			wg.Add(1)
			// Acquire the semaphore before spawning so at most 10
			// goroutines exist at once; acquiring inside the goroutine
			// would spawn one goroutine per URL immediately.
			semaphore <- struct{}{}
			go func(w models.Website, u models.URLInfo) {
				defer wg.Done()
				defer func() { <-semaphore }()
				s.checkURL(w, u)
			}(website, urlInfo)
		}
	}

	wg.Wait()

	// Flush all accumulated records to storage in one pass.
	s.storage.SaveAll()
}

// checkURL performs a single availability check for one URL, stores the
// resulting record, and opportunistically refreshes the website's cached
// title/favicon when the probe discovered newer values.
func (s *MonitorService) checkURL(website models.Website, urlInfo models.URLInfo) {
	result := s.httpClient.CheckWebsite(urlInfo.URL)

	record := models.MonitorRecord{
		WebsiteID:  website.ID,
		URLID:      urlInfo.ID,
		URL:        urlInfo.URL,
		StatusCode: result.StatusCode,
		Latency:    result.Latency.Milliseconds(),
		IsUp:       result.Error == nil && utils.IsSuccessStatus(result.StatusCode),
		CheckedAt:  time.Now(),
	}
	if result.Error != nil {
		record.Error = result.Error.Error()
	}

	s.storage.AddRecord(record)

	// Update cached website metadata (title and favicon) only when the
	// probe returned something new, to avoid needless writes.
	if result.Title != "" || result.Favicon != "" {
		w := s.storage.GetWebsite(website.ID)
		if w != nil {
			needUpdate := false
			if result.Title != "" && w.Title != result.Title {
				w.Title = result.Title
				needUpdate = true
			}
			if result.Favicon != "" && w.Favicon != result.Favicon {
				w.Favicon = result.Favicon
				needUpdate = true
			}
			if needUpdate {
				w.UpdatedAt = time.Now()
				s.storage.UpdateWebsite(*w)
			}
		}
	}

	log.Printf("检测 [%s] %s - 状态码: %d, 延迟: %dms, 可用: %v",
		website.Name, urlInfo.URL, result.StatusCode,
		result.Latency.Milliseconds(), record.IsUp)
}

// CheckWebsiteNow triggers an immediate, asynchronous check of every URL
// belonging to the given website. Unknown IDs are ignored.
func (s *MonitorService) CheckWebsiteNow(websiteID string) {
	website := s.storage.GetWebsite(websiteID)
	if website == nil {
		return
	}
	for _, urlInfo := range website.URLs {
		go s.checkURL(*website, urlInfo)
	}
}

// GetWebsiteStatus builds the aggregated status view for one website:
// per-URL status plus uptime averaged over the last 24 hours and 7 days.
// Returns nil when the website does not exist.
func (s *MonitorService) GetWebsiteStatus(websiteID string) *models.WebsiteStatus {
	website := s.storage.GetWebsite(websiteID)
	if website == nil {
		return nil
	}

	status := &models.WebsiteStatus{
		Website:     *website,
		URLStatuses: []models.URLStatus{},
	}

	now := time.Now()
	since24h := now.Add(-24 * time.Hour)
	since7d := now.Add(-7 * 24 * time.Hour)

	var totalUptime24h, totalUptime7d float64
	var urlCount int

	for _, urlInfo := range website.URLs {
		urlStatus := s.getURLStatus(website.ID, urlInfo, since24h, since7d)
		status.URLStatuses = append(status.URLStatuses, urlStatus)
		totalUptime24h += urlStatus.Uptime24h
		totalUptime7d += urlStatus.Uptime7d
		urlCount++
	}

	// Website-level uptime is the unweighted mean across its URLs.
	if urlCount > 0 {
		status.Uptime24h = totalUptime24h / float64(urlCount)
		status.Uptime7d = totalUptime7d / float64(urlCount)
	}

	// LastChecked is the most recent check time across all URLs.
	for _, urlStatus := range status.URLStatuses {
		if urlStatus.CurrentState.CheckedAt.After(status.LastChecked) {
			status.LastChecked = urlStatus.CurrentState.CheckedAt
		}
	}

	return status
}

// getURLStatus assembles the status of a single URL: latest record,
// raw 24-hour history with uptime/latency stats, and an hourly-aggregated
// 7-day history with its uptime.
func (s *MonitorService) getURLStatus(websiteID string, urlInfo models.URLInfo, since24h, since7d time.Time) models.URLStatus {
	urlStatus := models.URLStatus{
		URLInfo: urlInfo,
	}

	// Most recent record becomes the URL's current state (if any exists).
	latest := s.storage.GetLatestRecord(websiteID, urlInfo.ID)
	if latest != nil {
		urlStatus.CurrentState = *latest
	}

	// Raw records for the last 24 hours.
	records24h := s.storage.GetRecords(websiteID, urlInfo.ID, since24h)
	urlStatus.History24h = records24h

	// 24h uptime percentage and mean latency over the same window.
	if len(records24h) > 0 {
		upCount := 0
		var totalLatency int64
		for _, r := range records24h {
			if r.IsUp {
				upCount++
			}
			totalLatency += r.Latency
		}
		urlStatus.Uptime24h = float64(upCount) / float64(len(records24h)) * 100
		urlStatus.AvgLatency = totalLatency / int64(len(records24h))
	}

	// 7-day records are aggregated per hour to keep the payload small.
	records7d := s.storage.GetRecords(websiteID, urlInfo.ID, since7d)
	urlStatus.History7d = s.aggregateByHour(records7d)

	// 7d uptime percentage over the raw (unaggregated) records.
	if len(records7d) > 0 {
		upCount := 0
		for _, r := range records7d {
			if r.IsUp {
				upCount++
			}
		}
		urlStatus.Uptime7d = float64(upCount) / float64(len(records7d)) * 100
	}

	return urlStatus
}

// aggregateByHour buckets monitor records by the hour they were checked in
// and computes per-bucket uptime percentage and average latency. The result
// is sorted chronologically.
func (s *MonitorService) aggregateByHour(records []models.MonitorRecord) []models.HourlyStats {
	hourlyMap := make(map[string]*models.HourlyStats)

	for _, r := range records {
		hourKey := r.CheckedAt.Truncate(time.Hour).Format(time.RFC3339)
		if _, exists := hourlyMap[hourKey]; !exists {
			hourlyMap[hourKey] = &models.HourlyStats{
				Hour: r.CheckedAt.Truncate(time.Hour),
			}
		}
		stats := hourlyMap[hourKey]
		stats.TotalCount++
		if r.IsUp {
			stats.UpCount++
		}
		// Accumulate latency here; divided by TotalCount below.
		stats.AvgLatency += r.Latency
	}

	var result []models.HourlyStats
	for _, stats := range hourlyMap {
		if stats.TotalCount > 0 {
			stats.AvgLatency /= int64(stats.TotalCount)
			stats.Uptime = float64(stats.UpCount) / float64(stats.TotalCount) * 100
		}
		result = append(result, *stats)
	}

	// Map iteration order is random; sort so callers get a stable,
	// chronological series.
	sort.Slice(result, func(i, j int) bool {
		return result[i].Hour.Before(result[j].Hour)
	})

	return result
}

// GetAllWebsiteStatuses returns the aggregated status of every stored
// website, skipping any that disappear between listing and lookup.
func (s *MonitorService) GetAllWebsiteStatuses() []models.WebsiteStatus {
	websites := s.storage.GetWebsites()

	var statuses []models.WebsiteStatus
	for _, website := range websites {
		status := s.GetWebsiteStatus(website.ID)
		if status != nil {
			statuses = append(statuses, *status)
		}
	}

	return statuses
}