package dhtsearch

import (
	"encoding/json"
	"expvar"
	"fmt"
	"net/http"
	"strconv"
)

// results is the JSON envelope returned by searchHandler.
type results struct {
	Page     int       `json:"page"`
	PageSize int       `json:"page_size"`
	Torrents []Torrent `json:"torrents"`
}

// indexHandler serves the embedded HTML page at "/" and returns 404 for
// every other path.
func indexHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Cache-Control", "public")
	if r.URL.Path != "/" {
		w.WriteHeader(404)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.WriteHeader(200)
	w.Write(html)
}

// statsHandler emits all published expvar values as a single JSON object.
// The values are already valid JSON (expvar.Var.String contract), so the
// object is assembled by hand rather than re-encoded.
func statsHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.Header().Set("Cache-Control", "no-cache")
	w.WriteHeader(200)
	fmt.Fprintf(w, "{")
	first := true
	expvar.Do(func(kv expvar.KeyValue) {
		// Skip the large built-in vars that the UI has no use for.
		if kv.Key == "cmdline" || kv.Key == "memstats" {
			return
		}
		if !first {
			fmt.Fprintf(w, ",")
		}
		first = false
		fmt.Fprintf(w, "%q: %s", kv.Key, kv.Value)
	})
	fmt.Fprintf(w, "}")
}

// searchHandler answers torrent searches by name (?q=) or by tag (?tag=),
// paginated with a 1-based ?page= parameter. When neither query parameter
// is present it responds 406 (kept for compatibility with existing clients,
// though 400 would be the conventional status).
func searchHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.Header().Set("Cache-Control", "no-cache")

	page := 1
	if pStr := r.URL.Query().Get("page"); pStr != "" {
		p, err := strconv.Atoi(pStr)
		if err != nil {
			// BUG FIX: previously a non-numeric ?page= left page at
			// Atoi's zero result, producing a negative offset below.
			// Log and fall back to the first page instead.
			fmt.Printf("Failed to parse page: %q\n", err)
		} else if p > 0 {
			page = p
		}
	}
	// BUG FIX: the stride was hard-coded to 50 while the response reported
	// PageSize from Config; derive the offset from the same setting so the
	// two can never disagree.
	offset := (page - 1) * Config.ResultsPageSize

	// respond serializes one page of results, or a 500 on lookup failure.
	respond := func(torrents []Torrent, err error) {
		if err != nil {
			w.WriteHeader(500)
			fmt.Printf("Error: %q\n", err)
			return
		}
		w.WriteHeader(200)
		json.NewEncoder(w).Encode(results{Page: page, PageSize: Config.ResultsPageSize, Torrents: torrents})
	}

	if q := r.URL.Query().Get("q"); q != "" {
		torrents, err := torrentsByName(q, offset)
		respond(torrents, err)
		return
	}
	if tag := r.URL.Query().Get("tag"); tag != "" {
		torrents, err := torrentsByTag(tag, offset)
		respond(torrents, err)
		return
	}
	w.WriteHeader(406)
	json.NewEncoder(w).Encode("Query required")
}

// html holds the embedded index page (raw string continues below).
var html = []byte(`