magneticow: search now works perfectly!
- support for ordering is yet to be implemented
commit 0501fc3e3c (parent 44c6ebbb73)
Gopkg.toml
@@ -25,10 +25,6 @@
   branch = "master"
   name = "github.com/Wessie/appdirs"

-[[constraint]]
-  branch = "master"
-  name = "github.com/anacrolix/dht"
-
 [[constraint]]
   branch = "master"
   name = "github.com/anacrolix/missinggo"
cmd/magneticow/api.go (new file, +80 lines)
@@ -0,0 +1,80 @@
package main

import (
    "encoding/json"
    "net/http"
    "time"

    "github.com/boramalper/magnetico/pkg/persistence"

    "go.uber.org/zap"
)

func apiTorrentsHandler(w http.ResponseWriter, r *http.Request) {
    // @lastOrderedValue AND @lastID are either both supplied or neither of them should be supplied
    // at all; and if that is NOT the case, then return an error.
    if q := r.URL.Query(); !(
        (q.Get("lastOrderedValue") != "" && q.Get("lastID") != "") ||
        (q.Get("lastOrderedValue") == "" && q.Get("lastID") == "")) {
        respondError(w, 400, "`lastOrderedValue`, `lastID` must be supplied altogether, if supplied.")
        return
    }

    var tq TorrentsQ
    if err := decoder.Decode(&tq, r.URL.Query()); err != nil {
        respondError(w, 400, "error while parsing the URL: %s", err.Error())
        return
    }

    if tq.Query == nil {
        tq.Query = new(string)
        *tq.Query = ""
    }

    if tq.Epoch == nil {
        tq.Epoch = new(int64)
        *tq.Epoch = time.Now().Unix() // epoch, if not supplied, is NOW.
    } else if *tq.Epoch <= 0 {
        respondError(w, 400, "epoch must be greater than 0")
        return
    }

    if tq.LastID != nil && *tq.LastID < 0 {
        respondError(w, 400, "lastID has to be greater than or equal to zero")
        return
    }

    if tq.Ascending == nil {
        tq.Ascending = new(bool)
        *tq.Ascending = true
    }

    torrents, err := database.QueryTorrents(
        *tq.Query, *tq.Epoch, persistence.ByRelevance,
        *tq.Ascending, N_TORRENTS, tq.LastOrderedValue, tq.LastID)
    if err != nil {
        respondError(w, 400, "query error: %s", err.Error())
        return
    }

    jm, err := json.MarshalIndent(torrents, "", " ")
    if err != nil {
        respondError(w, 500, "json marshalling error: %s", err.Error())
        return
    }

    if _, err = w.Write(jm); err != nil {
        zap.L().Warn("couldn't write http.ResponseWriter", zap.Error(err))
    }
}

func apiTorrentsInfohashHandler(w http.ResponseWriter, r *http.Request) {

}

func apiFilesInfohashHandler(w http.ResponseWriter, r *http.Request) {

}

func apiStatisticsHandler(w http.ResponseWriter, r *http.Request) {

}
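Not part of the commit: a minimal sketch of how a client could consume the new endpoint, assuming magneticow is listening on localhost:10101 (the address set in main.go below) and that the response is the JSON array produced by json.MarshalIndent above. The torrent struct and the example query are illustrative; the field names follow the json tags added to persistence.TorrentMetadata in this commit.

package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "net/url"
)

// torrent mirrors the JSON field names emitted for persistence.TorrentMetadata;
// the struct name itself is illustrative, not part of the project.
type torrent struct {
    ID           uint64  `json:"id"`
    InfoHash     string  `json:"infoHash"` // hex-encoded by the server's MarshalJSON
    Name         string  `json:"name"`
    Size         uint64  `json:"size"`
    DiscoveredOn int64   `json:"discoveredOn"`
    NFiles       uint    `json:"nFiles"`
    Relevance    float64 `json:"relevance"`
}

func main() {
    // Only `query` is supplied here, so the handler fills in epoch (= now),
    // ascending (= true) and leaves lastID/lastOrderedValue unset.
    u := "http://localhost:10101/api/v0.1/torrents?query=" + url.QueryEscape("ubuntu")
    resp, err := http.Get(u)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    var torrents []torrent
    if err := json.NewDecoder(resp.Body).Decode(&torrents); err != nil {
        panic(err)
    }
    for _, t := range torrents {
        fmt.Printf("%s  %s (%d bytes)\n", t.InfoHash, t.Name, t.Size)
    }
}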
cmd/magneticow/data/static/scripts/mustache-v2.3.0.min.js (vendored, new file, +1 line)
File diff suppressed because one or more lines are too long
cmd/magneticow/data/static/scripts/torrents.js
@@ -1,4 +1,132 @@
-function loadMore() {
-    "use strict";
-    console.log("lastX", canLoadMore, lastID, lastOrderedValue);
-}
+"use strict";
+
+const query = (new URL(location)).searchParams.get("query")
+    , epoch = Math.floor(Date.now() / 1000)
+    ;
+let orderBy, ascending;  // use `setOrderBy()` to modify orderBy
+let lastOrderedValue, lastID;
+
+
+window.onload = function() {
+    if (query !== null && query !== "") {
+        orderBy = "RELEVANCE";
+    }
+
+    const title = document.getElementsByTagName("title")[0];
+    if (query) {
+        title.textContent = query + " - magneticow";
+        const input = document.getElementsByTagName("input")[0];
+        input.setAttribute("value", query);
+    }
+    else
+        title.textContent = "Most recent torrents - magneticow";
+
+    load();
+};
+
+
+function setOrderBy(x) {
+    const validValues = [
+        "TOTAL_SIZE",
+        "DISCOVERED_ON",
+        "UPDATED_ON",
+        "N_FILES",
+        "N_SEEDERS",
+        "N_LEECHERS",
+        "RELEVANCE"
+    ];
+    if (!validValues.includes(x)) {
+        throw new Error("invalid value for @orderBy");
+    }
+    orderBy = x;
+}
+
+function orderedValue(torrent) {
+    if (orderBy === "TOTAL_SIZE") return torrent.size;
+    else if (orderBy === "DISCOVERED_ON") return torrent.discoveredOn;
+    else if (orderBy === "UPDATED_ON") alert("implement it server side first!");
+    else if (orderBy === "N_FILES") return torrent.nFiles;
+    else if (orderBy === "N_SEEDERS") alert("implement it server side first!");
+    else if (orderBy === "N_LEECHERS") alert("implement it server side first!");
+    else if (orderBy === "RELEVANCE") return torrent.relevance;
+}
+
+
+function load() {
+    const button = document.getElementsByTagName("button")[0];
+    button.textContent = "Loading More Results...";
+    button.setAttribute("disabled", "");  // disable the button whilst loading...
+
+    const tbody = document.getElementsByTagName("tbody")[0];
+    const template = document.getElementById("row-template").innerHTML;
+    const reqURL = "/api/v0.1/torrents?" + encodeQueryData({
+        query           : query,
+        epoch           : epoch,
+        lastID          : lastID,
+        lastOrderedValue: lastOrderedValue
+    });
+
+    console.log("reqURL", reqURL);
+
+    let req = new XMLHttpRequest();
+    req.onreadystatechange = function() {
+        if (req.readyState !== 4)
+            return;
+
+        button.textContent = "Load More Results";
+        button.removeAttribute("disabled");
+
+        if (req.status !== 200)
+            alert(req.responseText);
+
+        let torrents = JSON.parse(req.responseText);
+        if (torrents.length === 0) {
+            button.textContent = "No More Results";
+            button.setAttribute("disabled", "");
+            return;
+        }
+
+        for (let t of torrents) {
+            t.size = fileSize(t.size);
+            t.discoveredOn = (new Date(t.discoveredOn * 1000)).toLocaleDateString("en-GB", {
+                day: "2-digit",
+                month: "2-digit",
+                year: "numeric"
+            });
+
+            tbody.innerHTML += Mustache.render(template, t);
+        }
+
+        const last = torrents[torrents.length - 1];
+        lastID = last.id;
+        lastOrderedValue = orderedValue(last);
+    };
+
+    req.open("GET", reqURL);
+    req.send();
+}
+
+
+// Source: https://stackoverflow.com/a/111545/4466589
+function encodeQueryData(data) {
+    let ret = [];
+    for (let d in data) {
+        if (data[d] === null || data[d] === undefined)
+            continue;
+        ret.push(encodeURIComponent(d) + "=" + encodeURIComponent(data[d]));
+    }
+    return ret.join("&");
+}
+
+
+// https://stackoverflow.com/q/10420352/4466589
+function fileSize(fileSizeInBytes) {
+    let i = -1;
+    let byteUnits = [' kB', ' MB', ' GB', ' TB', ' PB', ' EB', ' ZB', ' YB'];
+    do {
+        fileSizeInBytes = fileSizeInBytes / 1024;
+        i++;
+    } while (fileSizeInBytes > 1024);
+
+    return Math.max(fileSizeInBytes, 0.1).toFixed(1) + byteUnits[i];
+}
cmd/magneticow/data/templates/torrents.html
@@ -2,25 +2,35 @@
 <html lang="en">
 <head>
     <meta charset="utf-8">
-    <title>{{ if .Query }}"{{ .Query }}"{{ else }}Most recent torrents{{ end }} - magneticow</title>
+    <title>Search - magneticow</title>

     <link rel="stylesheet" href="static/styles/reset.css">
     <link rel="stylesheet" href="static/styles/essential.css">
     <link rel="stylesheet" href="static/styles/torrents.css">
-    <script>
-        var canLoadMore = {{ if .CanLoadMore }} true {{ else }} false {{ end }};
-        var lastOrderedValue = {{ .LastOrderedValue }};
-        var lastID = {{ .LastID }};
-    </script>
+    <script src="static/scripts/mustache-v2.3.0.min.js"></script>
     <script src="static/scripts/torrents.js"></script>
+
+    <script id="row-template" type="text/x-handlebars-template">
+        <tr>
+            <td><a href="magnet:?xt=urn:btih:{{infoHash}}&dn={{name}}">
+                <img src="static/assets/magnet.gif" alt="Magnet link"
+                     title="Download this torrent using magnet" /></a></td>
+            <td><a href="/torrents/{{infoHash}}/{{name}}">{{name}}</a></td>
+            <td>{{size}}</td>
+            <td>{{discoveredOn}}</td>
+        </tr>
+    </script>
 </head>
 <body>
     <header>
         <div><a href="/"><b>magnetico<sup>w</sup></b></a>​<sub>(pre-alpha)</sub></div>
+        <!-- TODO: why make a GET request again? handle it client-side -->
         <form action="/torrents" method="get" autocomplete="off" role="search">
-            <input type="search" name="query" placeholder="Search the BitTorrent DHT" value="{{ .Query }}">
+            <input type="search" name="query" placeholder="Search the BitTorrent DHT">
         </form>
         <div>
-            <a href="{{ .SubscriptionURL }}"><img src="static/assets/feed.png"
+            <a href="TODO-NOT-READY-YET"><img src="static/assets/feed.png"
                 alt="feed icon" title="subscribe" /> subscribe</a>
         </div>
     </header>
@@ -34,22 +44,12 @@
                 <th>Discovered on</th>
             </tr>
         </thead>
-        <tbody>
-            {{ range .Torrents }}
-            <tr>
-                <td><a href="magnet:?xt=urn:btih:{{ bytesToHex .InfoHash }}&dn={{ .Name }}">
-                    <img src="static/assets/magnet.gif" alt="Magnet link"
-                         title="Download this torrent using magnet" /></a></td>
-                <td><a href="/torrents/{{ bytesToHex .InfoHash }}/{{ .Name }}">{{ .Name }}</a></td>
-                <td>{{ humanizeSize .Size }}</td>
-                <td>{{ unixTimeToYearMonthDay .DiscoveredOn }}</td>
-            </tr>
-            {{ end }}
+        <tbody id="rows">
         </tbody>
     </table>
 </main>
 <footer>
-    <button onclick="loadMore();" {{ if not .CanLoadMore }} disabled {{ end }}>
+    <button onclick="load();">
         Load More Results
     </button>
 </footer>
cmd/magneticow/main.go
@@ -7,16 +7,12 @@ import (
 	"log"
 	"net/http"
 	"os"
-	"strconv"
+	"strings"
 	"time"

-	//"strconv"
-	"strings"
-	// "time"
-
 	"github.com/dustin/go-humanize"
-	// "github.com/dustin/go-humanize"
 	"github.com/gorilla/mux"
+	"github.com/gorilla/schema"
 	"go.uber.org/zap"
 	"go.uber.org/zap/zapcore"

@@ -25,9 +21,23 @@ import (

 const N_TORRENTS = 20

+// Set a Decoder instance as a package global, because it caches
+// meta-data about structs, and an instance can be shared safely.
+var decoder = schema.NewDecoder()
+
 var templates map[string]*template.Template
 var database persistence.Database

+// ======= Q: Query =======
+type TorrentsQ struct {
+	Epoch            *int64   `schema:"epoch"`
+	Query            *string  `schema:"query"`
+	OrderBy          *string  `schema:"orderBy"`
+	Ascending        *bool    `schema:"ascending"`
+	LastOrderedValue *float64 `schema:"lastOrderedValue"`
+	LastID           *uint64  `schema:"lastID"`
+}
+
 // ========= TD: TemplateData =========
 type HomepageTD struct {
 	NTorrents uint
@@ -41,7 +51,7 @@ type TorrentsTD struct {
 	SortedBy         string
 	NextPageExists   bool
 	Epoch            int64
-	LastOrderedValue uint64
+	LastOrderedValue float64
 	LastID           uint64

 }
@@ -72,12 +82,20 @@ func main() {

 	router := mux.NewRouter()
 	router.HandleFunc("/", rootHandler)
+
+	router.HandleFunc("/api/v0.1/torrents", apiTorrentsHandler)
+	router.HandleFunc("/api/v0.1/torrents/{infohash:[a-z0-9]{40}}", apiTorrentsInfohashHandler)
+	router.HandleFunc("/api/v0.1/files/{infohash:[a-z0-9]{40}}", apiFilesInfohashHandler)
+	router.HandleFunc("/api/v0.1/statistics", apiStatisticsHandler)
+
 	router.HandleFunc("/torrents", torrentsHandler)
 	router.HandleFunc("/torrents/{infohash:[a-z0-9]{40}}", torrentsInfohashHandler)
 	router.HandleFunc("/statistics", statisticsHandler)
+	router.HandleFunc("/feed", feedHandler)

 	router.PathPrefix("/static").HandlerFunc(staticHandler)

-	router.HandleFunc("/feed", feedHandler)
-
 	templateFunctions := template.FuncMap{
 		"add": func(augend int, addends int) int {
@@ -118,7 +136,7 @@ func main() {
 	templates["homepage"] = template.Must(template.New("homepage").Funcs(templateFunctions).Parse(string(mustAsset("templates/homepage.html"))))
 	// templates["statistics"] = template.Must(template.New("statistics").Parse(string(mustAsset("templates/statistics.html"))))
 	// templates["torrent"] = template.Must(template.New("torrent").Funcs(templateFunctions).Parse(string(mustAsset("templates/torrent.html"))))
-	templates["torrents"] = template.Must(template.New("torrents").Funcs(templateFunctions).Parse(string(mustAsset("templates/torrents.html"))))
+	// templates["torrents"] = template.Must(template.New("torrents").Funcs(templateFunctions).Parse(string(mustAsset("templates/torrents.html"))))

 	var err error
 	database, err = persistence.MakeDatabase("sqlite3:///home/bora/.local/share/magneticod/database.sqlite3", logger)
@@ -126,8 +144,14 @@ func main() {
 		panic(err.Error())
 	}

+	decoder.IgnoreUnknownKeys(false)
+	decoder.ZeroEmpty(true)
+
 	zap.L().Info("magneticow is ready to serve!")
-	http.ListenAndServe(":8080", router)
+	err = http.ListenAndServe(":10101", router)
+	if err != nil {
+		zap.L().Error("ListenAndServe error", zap.Error(err))
+	}
 }

 // DONE
@@ -136,84 +160,26 @@ func rootHandler(w http.ResponseWriter, r *http.Request) {
 	if err != nil {
 		panic(err.Error())
 	}
-	templates["homepage"].Execute(w, HomepageTD{
+
+	err = templates["homepage"].Execute(w, HomepageTD{
 		NTorrents: nTorrents,
 	})
+	if err != nil {
+		panic(err.Error())
+	}
 }

+// TODO: I think there is a standard lib. function for this
 func respondError(w http.ResponseWriter, statusCode int, format string, a ...interface{}) {
 	w.WriteHeader(statusCode)
 	w.Write([]byte(fmt.Sprintf(format, a...)))
 }

+// TODO: we might as well move torrents.html into static...
 func torrentsHandler(w http.ResponseWriter, r *http.Request) {
-	// TODO: Parsing URL Query is tedious and looks stupid... can we do better?
-	queryValues := r.URL.Query()
-
-	var query string
-	epoch := time.Now().Unix() // epoch, if not supplied, is NOW.
-	var lastOrderedValue, lastID *uint64
-
-	if query = queryValues.Get("query"); query == "" {
-		respondError(w, 400, "query is missing")
-		return
-	}
-
-	if queryValues.Get("epoch") != "" && queryValues.Get("lastOrderedValue") != "" && queryValues.Get("lastID") != "" {
-		var err error
-
-		epoch, err = strconv.ParseInt(queryValues.Get("epoch"), 10, 64)
-		if err != nil {
-			respondError(w, 400, "error while parsing epoch: %s", err.Error())
-			return
-		}
-		if epoch <= 0 {
-			respondError(w, 400, "epoch has to be greater than zero")
-			return
-		}
-
-		*lastOrderedValue, err = strconv.ParseUint(queryValues.Get("lastOrderedValue"), 10, 64)
-		if err != nil {
-			respondError(w, 400, "error while parsing lastOrderedValue: %s", err.Error())
-			return
-		}
-		if *lastOrderedValue <= 0 {
-			respondError(w, 400, "lastOrderedValue has to be greater than zero")
-			return
-		}
-
-		*lastID, err = strconv.ParseUint(queryValues.Get("lastID"), 10, 64)
-		if err != nil {
-			respondError(w, 400, "error while parsing lastID: %s", err.Error())
-			return
-		}
-		if *lastID <= 0 {
-			respondError(w, 400, "lastID has to be greater than zero")
-			return
-		}
-	} else if !(queryValues.Get("epoch") == "" && queryValues.Get("lastOrderedValue") == "" && queryValues.Get("lastID") == "") {
-		respondError(w, 400, "`epoch`, `lastOrderedValue`, `lastID` must be supplied altogether, if supplied.")
-		return
-	}
-
-	torrents, err := database.QueryTorrents(query, epoch, persistence.ByRelevance, true, 20, nil, nil)
-	if err != nil {
-		respondError(w, 400, "query error: %s", err.Error())
-		return
-	}
-
-	if torrents == nil {
-		panic("torrents is nil!!!")
-	}
-
-	templates["torrents"].Execute(w, TorrentsTD{
-		CanLoadMore:     true,
-		Query:           query,
-		SubscriptionURL: "borabora",
-		Torrents:        torrents,
-		SortedBy:        "anan",
-		NextPageExists:  true,
-	})
+	data := mustAsset("templates/torrents.html")
+	w.Header().Set("Content-Type", http.DetectContentType(data))
+	w.Write(data)
 }

 func torrentsInfohashHandler(w http.ResponseWriter, r *http.Request) {
cmd/magneticow/main_test.go (new file, +95 lines)
@@ -0,0 +1,95 @@
package main

import (
    "fmt"
    "testing"
)

type schemaStruct struct {
    PString *string `schema:"pstring"`
    PUint64 *uint64 `schema:"puint64"`
    PBool   *bool   `schema:"pbool"`

    String string `schema:"string"`
    Uint64 uint64 `schema:"uint64"`
    Bool   bool   `schema:"bool"`
}

type schemaRStruct struct {
    Uint64 uint64 `schema:"ruint64,required"` // https://github.com/gorilla/schema/pull/68
}

// TestSchemaUnsuppliedNil tests that unsupplied values yield nil.
func TestSchemaUnsuppliedNil(t *testing.T) {
    ss := new(schemaStruct)
    if err := decoder.Decode(ss, make(map[string][]string)); err != nil {
        t.Error("decoding error", err.Error())
    }

    if ss.PString != nil { t.Error("PString is not nil") }
    if ss.PUint64 != nil { t.Error("PUint64 is not nil") }
    if ss.PBool != nil { t.Error("PBool is not nil") }
}

// TestSchemaInvalidUint64 tests that an invalid uint64 value yields nil.
func TestSchemaInvalidUint64(t *testing.T) {
    dict := make(map[string][]string)
    dict["puint64"] = []string{"-1"}

    ss := new(schemaStruct)
    err := decoder.Decode(ss, dict)
    if err == nil { t.Error("err is nil") }
}

// TestSchemaInvalidBool tests that an invalid bool value yields nil.
func TestSchemaInvalidBool(t *testing.T) {
    dict := make(map[string][]string)
    dict["pbool"] = []string{"yyy"}

    ss := new(schemaStruct)
    err := decoder.Decode(ss, dict)
    if err == nil { t.Error("err is nil") }
}

// TestSchemaOverflow tests that integer values greater than the maximum value a field can store
// leads to decoding errors, rather than silent overflows.
func TestSchemaOverflow(t *testing.T) {
    dict := make(map[string][]string)
    dict["puint64"] = []string{"18446744073709551616"} // 18,446,744,073,709,551,615 + 1

    ss := new(schemaStruct)
    err := decoder.Decode(ss, dict)
    if err == nil { t.Error("err is nil") }
}

// TestSchemaEmptyString tests that empty string yields nil.
func TestSchemaEmptyString(t *testing.T) {
    dict := make(map[string][]string)
    dict["pstring"] = []string{""}

    ss := new(schemaStruct)
    if err := decoder.Decode(ss, make(map[string][]string)); err != nil {
        t.Error("decoding error", err.Error())
    }

    if ss.PString != nil { t.Error("PString is not nil") }
}

// TestSchemaDefault tests if unsupplied values defaults to "zero" and doesn't err
func TestSchemaDefault(t *testing.T) {
    ss := new(schemaStruct)
    if err := decoder.Decode(ss, make(map[string][]string)); err != nil {
        t.Error("decoding error", err.Error())
    }

    if ss.String != "" { t.Error("String is not empty") }
    if ss.Uint64 != 0 { t.Error("Uint64 is not 0") }
    if ss.Bool != false { t.Error("Bool is not false") }
}

func TestSchemaRequired(t *testing.T) {
    rs := new(schemaRStruct)
    err := decoder.Decode(rs, make(map[string][]string))
    if err == nil { t.Error("err is nil") }
    fmt.Printf(err.Error())
}
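Not part of the commit: the tests above pin down the gorilla/schema behaviour that apiTorrentsHandler relies on. Below is a stand-alone sketch of the same decoding pattern; the torrentsQ struct and the sample values are illustrative, while NewDecoder, ZeroEmpty and Decode are the same gorilla/schema calls used in main.go.

package main

import (
    "fmt"

    "github.com/gorilla/schema"
)

// torrentsQ mirrors the pointer-field pattern of TorrentsQ in main.go:
// an unsupplied query parameter stays nil instead of silently becoming zero.
type torrentsQ struct {
    Epoch     *int64  `schema:"epoch"`
    Query     *string `schema:"query"`
    Ascending *bool   `schema:"ascending"`
}

func main() {
    d := schema.NewDecoder()
    d.ZeroEmpty(true) // as configured in main(): empty values zero the field instead of erroring

    // r.URL.Query() has exactly this map[string][]string shape.
    src := map[string][]string{
        "query": {"ubuntu"},
        "epoch": {"1514764800"},
    }

    var q torrentsQ
    if err := d.Decode(&q, src); err != nil {
        panic(err)
    }

    fmt.Println(*q.Query, *q.Epoch) // ubuntu 1514764800
    fmt.Println(q.Ascending == nil) // true: not supplied, so the handler must default it
}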
pkg/persistence
@@ -1,6 +1,8 @@
 package persistence

 import (
+	"encoding/hex"
+	"encoding/json"
 	"fmt"
 	"net/url"

@@ -21,13 +23,15 @@ type Database interface {
 	// * that match the @query if it's not empty, else all torrents
 	// * ordered by the @orderBy in ascending order if @ascending is true, else in descending order
 	// after skipping (@page * @pageSize) torrents that also fits the criteria above.
+	//
+	// On error, returns (nil, error), otherwise a non-nil slice of TorrentMetadata and nil.
 	QueryTorrents(
 		query string,
 		epoch int64,
 		orderBy orderingCriteria,
 		ascending bool,
 		limit uint,
-		lastOrderedValue *uint64,
+		lastOrderedValue *float64,
 		lastID *uint64,
 	) ([]TorrentMetadata, error)
 	// GetTorrents returns the TorrentExtMetadata for the torrent of the given InfoHash. Will return
@@ -67,11 +71,24 @@ type File struct {
 }

 type TorrentMetadata struct {
-	InfoHash     []byte
-	Name         string
-	Size         uint64
-	DiscoveredOn int64
-	NFiles       uint
+	ID           uint64  `json:"id"`
+	InfoHash     []byte  `json:"infoHash"` // marshalled differently
+	Name         string  `json:"name"`
+	Size         uint64  `json:"size"`
+	DiscoveredOn int64   `json:"discoveredOn"`
+	NFiles       uint    `json:"nFiles"`
+	Relevance    float64 `json:"relevance"`
+}
+
+func (tm *TorrentMetadata) MarshalJSON() ([]byte, error) {
+	type Alias TorrentMetadata
+	return json.Marshal(&struct {
+		InfoHash string `json:"infoHash"`
+		*Alias
+	}{
+		InfoHash: hex.EncodeToString(tm.InfoHash),
+		Alias:    (*Alias)(tm),
+	})
 }

 func MakeDatabase(rawURL string, logger *zap.Logger) (Database, error) {
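Not part of the commit: the alias-struct trick above is needed because encoding/json would otherwise base64-encode the raw []byte info hash. A self-contained sketch of the same pattern with an illustrative, trimmed-down struct, showing the hex-encoded output:

package main

import (
    "encoding/hex"
    "encoding/json"
    "fmt"
)

// torrentMetadata is a cut-down stand-in for persistence.TorrentMetadata.
type torrentMetadata struct {
    ID       uint64 `json:"id"`
    InfoHash []byte `json:"infoHash"` // marshalled differently
    Name     string `json:"name"`
}

// Same pattern as in the diff: the Alias type drops the MarshalJSON method
// (avoiding infinite recursion) while the wrapper struct shadows InfoHash
// with its hex-encoded form.
func (tm *torrentMetadata) MarshalJSON() ([]byte, error) {
    type Alias torrentMetadata
    return json.Marshal(&struct {
        InfoHash string `json:"infoHash"`
        *Alias
    }{
        InfoHash: hex.EncodeToString(tm.InfoHash),
        Alias:    (*Alias)(tm),
    })
}

func main() {
    tm := &torrentMetadata{ID: 42, InfoHash: []byte{0xde, 0xad, 0xbe, 0xef}, Name: "example"}
    out, _ := json.Marshal(tm)
    fmt.Println(string(out)) // {"infoHash":"deadbeef","id":42,"name":"example"}
}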
@@ -166,7 +166,7 @@ func (db *sqlite3Database) QueryTorrents(
 	orderBy orderingCriteria,
 	ascending bool,
 	limit uint,
-	lastOrderedValue *uint64,
+	lastOrderedValue *float64,
 	lastID *uint64,
 ) ([]TorrentMetadata, error) {
 	if query == "" && orderBy == ByRelevance {
@@ -177,15 +177,21 @@ func (db *sqlite3Database) QueryTorrents(
 	}

 	doJoin := query != ""
-	firstPage := true // lastID != nil
+	firstPage := lastID == nil

 	// executeTemplate is used to prepare the SQL query, WITH PLACEHOLDERS FOR USER INPUT.
 	sqlQuery := executeTemplate(`
-	SELECT info_hash
+	SELECT id
+	     , info_hash
 	     , name
 	     , total_size
 	     , discovered_on
 	     , (SELECT COUNT(*) FROM files WHERE torrents.id = files.torrent_id) AS n_files
+	{{ if .DoJoin }}
+	     , idx.rank
+	{{ else }}
+	     , 0
+	{{ end }}
 	FROM torrents
 	{{ if .DoJoin }}
 	INNER JOIN (
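Not part of the commit: the WHERE and ORDER BY parts of the SQL template fall outside this hunk, but the (lastOrderedValue, lastID) pair together with the firstPage flag is the usual keyset-pagination shape. A generic sketch of that pattern follows; the column names and clause are assumptions for illustration, not magnetico's actual template.

package main

import "fmt"

// pageClause sketches keyset pagination: after the first page, anchor on the
// ordering value of the last row seen and break ties on the unique row id.
// (Illustrative only; column names and placeholders are assumptions.)
func pageClause(firstPage bool, ascending bool) string {
    if firstPage {
        return "" // no anchor row yet; ORDER BY and LIMIT alone decide the page
    }
    op := ">"
    if !ascending {
        op = "<"
    }
    return fmt.Sprintf("WHERE (ordering_value, id) %s (?, ?)", op)
}

func main() {
    fmt.Println(pageClause(true, true))  // first page: empty clause
    fmt.Println(pageClause(false, true)) // WHERE (ordering_value, id) > (?, ?)
}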
@@ -250,7 +256,16 @@ func (db *sqlite3Database) QueryTorrents(
 	torrents := make([]TorrentMetadata, 0)
 	for rows.Next() {
 		var torrent TorrentMetadata
-		if err = rows.Scan(&torrent.InfoHash, &torrent.Name, &torrent.Size, &torrent.DiscoveredOn, &torrent.NFiles); err != nil {
+		err = rows.Scan(
+			&torrent.ID,
+			&torrent.InfoHash,
+			&torrent.Name,
+			&torrent.Size,
+			&torrent.DiscoveredOn,
+			&torrent.NFiles,
+			&torrent.Relevance,
+		)
+		if err != nil {
 			return nil, err
 		}
 		torrents = append(torrents, torrent)