handler_scrape.go
package tracker

import (
	"net/http"

	"github.com/rs/zerolog/log"
	"github.com/salimnassim/tracker/metric"
)

// ScrapeHandler handles BitTorrent scrape requests. It reads one or more
// info_hash query parameters, looks the torrents up in the server store,
// and replies with a bencoded scrape response.
func ScrapeHandler(server *Server) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		metric.TrackerScrape.Inc()

		// A scrape request must carry at least one info_hash parameter.
		infoHash, ok := r.URL.Query()["info_hash"]
		if !ok {
			log.Error().Str("source", "http_scrape").Msg("info_hash is not present")
			failure := ErrorResponse{
				FailureReason: "info_hash is not present",
			}
			replyBencode(w, failure, http.StatusBadRequest)
			return
		}

		// Convert the query values to raw byte slices for the store lookup.
		var hashes [][]byte
		for _, v := range infoHash {
			hashes = append(hashes, []byte(v))
		}

		torrents, err := server.store.Scrape(ctx, hashes)
		if err != nil {
			log.Error().Err(err).Str("source", "http_scrape").Msg("unable to fetch torrents")
			failure := ErrorResponse{
				FailureReason: "internal server error",
			}
			replyBencode(w, failure, http.StatusInternalServerError)
			return
		}

		// Build the scrape response keyed by info hash.
		scrape := ScrapeResponse{
			Files: make(map[string]ScrapeTorrent),
		}
		for _, t := range torrents {
			scrape.Files[string(t.InfoHash)] = ScrapeTorrent{
				Complete:   t.Seeders,
				Incomplete: t.Leechers,
				Downloaded: t.Completed,
			}
		}

		metric.TrackerScrapeReply.Inc()
		replyBencode(w, scrape, http.StatusOK)
	}
}
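
A minimal usage sketch, not part of the original file: how the handler might be mounted on a standard library mux. The server value and the "/scrape" path are assumptions for illustration; the real wiring lives elsewhere in the repository.

package main

import (
	"net/http"

	"github.com/salimnassim/tracker"
)

func main() {
	// Hypothetical: the actual Server construction is not shown in handler_scrape.go.
	var server *tracker.Server

	// Register the scrape handler on an assumed "/scrape" route.
	mux := http.NewServeMux()
	mux.HandleFunc("/scrape", tracker.ScrapeHandler(server))
	http.ListenAndServe(":8080", mux)
}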