package main

import (
	"log"
	"net/http"
	"regexp"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/PuerkitoBio/goquery"
	"github.com/gin-gonic/gin"
	"github.com/gorilla/websocket"
)

// fetchData is one news item scraped from the Baidu News result page.
type fetchData struct {
	Url   string `json:"url"`
	Title string `json:"title"`
	Desc  string `json:"desc"`
	Date  string `json:"date"`
}

// connChan pairs an incoming client message with the connection it came from.
type connChan struct {
	conn string
	msg  message
}

// dataChan carries newly fetched items destined for one connection.
type dataChan struct {
	conn string
	item []fetchData
}

// fetchHandler holds the crawler state, keyed by the remote address of each
// websocket connection.
type fetchHandler struct {
	fetchUrl      string
	hadFetchData  []fetchData
	cronTime      map[string]time.Duration
	keyword       map[string]string
	hadFetchedMap map[string]int
	reloadCron    map[string]chan int
	isOff         chan int
	rMsgChan      chan connChan
	newFetchItem  chan dataChan
	connMap       map[string]*websocket.Conn
}

// setting is the payload a client sends with a "search" action.
type setting struct {
	Keyword  string `json:"keyword"`
	TimeStep int    `json:"timeStep"`
}

// message is the envelope exchanged over the websocket in both directions.
type message struct {
	Status  bool
	Action  string
	Message string
	Data    interface{}
}

// mapX, mapXS and setMap are generic helpers for a mutex-guarded map. They are
// currently unused; the per-connection maps on fetchHandler are touched from
// several goroutines and could be wrapped with them.
type mapX[T string | int | time.Duration] map[string]T

type mapXS[T string | int | time.Duration] struct {
	*mapX[T]
	*sync.Mutex
}

func setMap[T string | int | time.Duration](obj *mapXS[T], key string, v T) {
	obj.Lock()
	(*obj.mapX)[key] = v
	obj.Unlock()
}

func newFetchHandler(fetchUrl string) *fetchHandler {
	return &fetchHandler{
		fetchUrl:      fetchUrl,
		keyword:       make(map[string]string),
		hadFetchedMap: make(map[string]int),
		cronTime:      make(map[string]time.Duration),
		reloadCron:    make(map[string]chan int),
		isOff:         make(chan int),
		rMsgChan:      make(chan connChan, 10),
		newFetchItem:  make(chan dataChan, 10),
		connMap:       make(map[string]*websocket.Conn),
	}
}

// handle fetches and parses the result page for one connection, using that
// connection's keyword ("纪检", "discipline inspection", by default).
func (f *fetchHandler) handle(conn string) {
	key := "纪检"
	if kk, ok := f.keyword[conn]; ok && kk != "" {
		key = kk
	}
	f.parsesDom(f.fetch(f.fetchUrl+key), conn)
}

// receiveMsg dispatches messages read from clients: a "search" action updates
// the fetch interval and keyword for that connection and triggers a fetch.
func (f *fetchHandler) receiveMsg() {
	for {
		r := <-f.rMsgChan
		switch r.msg.Action {
		case "search":
			if t, ok := r.msg.Data.(*setting); ok {
				f.reloadCron[r.conn] <- t.TimeStep
				f.keyword[r.conn] = t.Keyword
				f.handle(r.conn)
			}
		}
	}
}

// fetch downloads the search result page while impersonating a desktop Chrome
// browser. On failure it logs the error and returns nil.
func (f *fetchHandler) fetch(url string) *http.Response {
	defer func() {
		if r := recover(); r != nil {
			log.Println(r)
		}
	}()
	client := http.Client{
		Timeout: 10 * time.Second,
	}
	req, _ := http.NewRequest("GET", url, nil)
	req.Header.Add("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9")
	req.Header.Add("accept-language", "zh-CN,zh;q=0.9")
	req.Header.Add("cache-control", "no-cache")
	req.Header.Add("connection", "keep-alive")
	req.Header.Add("cookie", "BIDUPSID=844E3DCAA2EEBF5C872DC99B967B6B7B; PSTM=1655872163; BAIDUID=844E3DCAA2EEBF5CB3E1D79750162204:FG=1; BD_UPN=123353; ORIGIN=2; ISSW=1; ISSW=1; BAIDUID_BFESS=844E3DCAA2EEBF5CB3E1D79750162204:FG=1; ZFY=jWFAySgO:AoQfb6emY9vnmEdptVao:Anj0FFkp028wFws:C; BD_HOME=1; delPer=0; BD_CK_SAM=1; PSINO=3; COOKIE_SESSION=42_0_2_2_3_0_1_0_2_0_0_0_18_0_51_0_1655888428_0_1655888377%7C3%230_0_1655888377%7C1; BAIDU_WISE_UID=wapp_1655902298617_702; ZD_ENTRY=google; channel=baidusearch; baikeVisitId=b3b23509-9330-4d33-82ae-b8eb37895917; BA_HECTOR=8k2g2g218ga40181ak1hbgg1n14; BDRCVFR[C0p6oIjvx-c]=mbxnW11j9Dfmh7GuZR8mvqV; BDSVRTM=1011; H_PS_PSSID=36550_36459_36673_36455_36453_36692_36165_36695_36697_36569_36075_36467_36316_36651")
	req.Header.Add("referer", "http://news.baidu.com/")
	req.Header.Add("sec-fetch-dest", "document")
	req.Header.Add("sec-fetch-mode", "navigate")
	req.Header.Add("sec-fetch-site", "cross-site")
	req.Header.Add("sec-fetch-user", "?1")
	req.Header.Add("upgrade-insecure-requests", "1")
	req.Header.Add("user-agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36")
	req.Header.Add("sec-ch-ua", `".Not/A)Brand";v="99", "Google Chrome";v="103", "Chromium";v="103"`)
	req.Header.Add("sec-ch-ua-mobile", "?0")
	req.Header.Add("sec-ch-ua-platform", `"Linux"`)
	req.Header.Add("postman-token", "81407fbc-2b96-54a7-0193-f640156714ab")
	response, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	return response
}

// parsesDom extracts result items from the page and forwards the ones not yet
// seen for this connection on newFetchItem.
func (f *fetchHandler) parsesDom(html *http.Response, conn string) {
	defer func() {
		if r := recover(); r != nil {
			log.Println(r)
		}
	}()
	// fetch returns nil after recovering from an error; nothing to parse then.
	if html == nil {
		return
	}
	doc, err := goquery.NewDocumentFromReader(html.Body)
	if err != nil {
		panic(err)
	}
	var newFetch []fetchData
	ti := time.Now()
	log.Println(ti.Format("2006-01-02 15:04:05"))
	compile := regexp.MustCompile(`(\d+)`)
	doc.Find(`div[class="result-op c-container xpath-log new-pmd"]`).Each(func(i int, selection *goquery.Selection) {
		data := fetchData{}
		data.Url, _ = selection.Attr("mu")
		t := selection.Find(".news-title-font_1xS-F").First()
		data.Title = t.Text()
		data.Desc = selection.Find(".c-row .c-color-text").First().Text()
		data.Date = selection.Find(`span[class="c-color-gray2 c-font-normal c-gap-right-xsmall"]`).First().Text()
		// Relative dates such as "3小时前" (hours ago) or "5分钟前" (minutes ago)
		// are converted to absolute timestamps.
		n := compile.FindAllStringSubmatch(data.Date, -1)
		if n != nil {
			nn, _ := strconv.Atoi(n[0][0])
			if strings.Contains(data.Date, "小时") {
				data.Date = ti.Add(-time.Duration(nn) * time.Hour).Format("2006-01-02 15:04")
			}
			if strings.Contains(data.Date, "分钟") {
				data.Date = ti.Add(-time.Duration(nn) * time.Minute).Format("2006-01-02 15:04")
			}
		}
		// Deduplicate per connection on URL and title.
		k := conn + "_" + data.Url + "_" + data.Title
		if _, ok := f.hadFetchedMap[k]; !ok {
			f.hadFetchData = append(f.hadFetchData, data)
			f.hadFetchedMap[k] = 1
			newFetch = append(newFetch, data)
		}
	})
	if len(newFetch) > 0 {
		f.newFetchItem <- dataChan{
			conn: conn,
			item: newFetch,
		}
	}
	if err = html.Body.Close(); err != nil {
		panic(err)
	}
}

// sendFetchData pushes freshly scraped items to the websocket they belong to.
func (f *fetchHandler) sendFetchData() {
	for {
		data := <-f.newFetchItem
		err := f.connMap[data.conn].WriteJSON(message{
			Status:  true,
			Action:  "newData",
			Message: "",
			Data:    data.item,
		})
		if err != nil {
			log.Println(err)
		}
	}
}

// cronFetch re-runs handle for one connection on a ticker. A value received on
// reloadCron changes the interval and restarts the loop; a value on c stops it.
func (f *fetchHandler) cronFetch(conn string, c chan int) {
	step, ok := f.cronTime[conn]
	if !ok {
		step = time.Second * 60
	}
	t := time.NewTicker(step)
	if _, ok := f.reloadCron[conn]; !ok {
		f.reloadCron[conn] = make(chan int)
	}
	defer t.Stop()
	for {
		select {
		case <-t.C:
			f.handle(conn)
		case tt := <-f.reloadCron[conn]:
			f.cronTime[conn] = time.Duration(tt) * time.Second
			go f.cronFetch(conn, c)
			return
		case <-c:
			close(c)
			return
		}
	}
}

func main() {
	h := newFetchHandler("https://www.baidu.com/s?rtt=1&bsst=1&cl=2&tn=news&rsv_dl=ns_pc&word=")
	router := gin.Default()
	var upgrader = websocket.Upgrader{
		// Accept upgrade requests from any origin.
		CheckOrigin: func(r *http.Request) bool {
			return true
		},
	}
	go h.sendFetchData()
	go h.receiveMsg()

	router.LoadHTMLGlob("templates/*")
	//router.LoadHTMLFiles("templates/template1.html", "templates/template2.html")
	router.GET("/index", func(c *gin.Context) {
		c.HTML(http.StatusOK, "index.gohtml", gin.H{
			"title": "爬虫",
		})
	})
	router.GET("/ws", func(c *gin.Context) {
		conn, err := upgrader.Upgrade(c.Writer, c.Request, nil)
		if err != nil {
			c.JSON(201, message{
				Status:  false,
				Message: err.Error(),
				Data:    nil,
				Action:  "upgradeWs",
			})
			log.Println(err)
			return
		}
		remote := conn.RemoteAddr().String()
		if _, ok := h.connMap[remote]; !ok {
			h.connMap[remote] = conn
		}
		// Create the reload channel before the reader goroutine can forward a
		// "search" message that writes to it.
		if _, ok := h.reloadCron[remote]; !ok {
			h.reloadCron[remote] = make(chan int)
		}
		cc := make(chan int)
		go h.cronFetch(remote, cc)
		// Reader goroutine: ReadJSON decodes into the *setting stored in
		// msg.msg.Data, so receiveMsg can type-assert it back with Data.(*setting).
		go func() {
			msg := connChan{
				conn: remote,
				msg: message{
					Data: &setting{},
				},
			}
			for {
				err := conn.ReadJSON(&msg.msg)
				if err != nil {
					// A failed read means the connection is gone or unusable:
					// unregister it and stop both this goroutine and the cron loop.
					log.Println(err)
					delete(h.connMap, remote)
					cc <- 1
					return
				}
				h.rMsgChan <- msg
			}
		}()
	})
	if err := router.Run(":8080"); err != nil {
		panic(err)
	}
}
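// The generic mapX/mapXS/setMap helpers defined above are never called by the
// program. A minimal sketch of how they could guard one of the shared maps,
// assuming a hypothetical caller; the address and keyword values below are
// made-up examples, not taken from the program:
//
//	m := mapX[string]{}
//	kw := mapXS[string]{mapX: &m, Mutex: &sync.Mutex{}}
//	setMap(&kw, "127.0.0.1:51234", "纪检")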