From 0501fc3e3c8d6c3a5e40292503323dd4636d8c02 Mon Sep 17 00:00:00 2001 From: Bora Alper Date: Tue, 19 Jun 2018 18:49:46 +0300 Subject: [PATCH] magneticow: search now works perfectly! - support for ordering is yet to be implemented --- Gopkg.toml | 4 - cmd/magneticow/api.go | 80 +++++++++++ .../static/scripts/mustache-v2.3.0.min.js | 1 + .../data/static/scripts/torrents.js | 132 +++++++++++++++++- cmd/magneticow/data/templates/torrents.html | 40 +++--- cmd/magneticow/main.go | 122 ++++++---------- cmd/magneticow/main_test.go | 95 +++++++++++++ pkg/persistence/interface.go | 29 +++- pkg/persistence/sqlite3.go | 23 ++- 9 files changed, 412 insertions(+), 114 deletions(-) create mode 100644 cmd/magneticow/api.go create mode 100644 cmd/magneticow/data/static/scripts/mustache-v2.3.0.min.js create mode 100644 cmd/magneticow/main_test.go diff --git a/Gopkg.toml b/Gopkg.toml index d36adfa..980964b 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -25,10 +25,6 @@ branch = "master" name = "github.com/Wessie/appdirs" -[[constraint]] - branch = "master" - name = "github.com/anacrolix/dht" - [[constraint]] branch = "master" name = "github.com/anacrolix/missinggo" diff --git a/cmd/magneticow/api.go b/cmd/magneticow/api.go new file mode 100644 index 0000000..f5a8119 --- /dev/null +++ b/cmd/magneticow/api.go @@ -0,0 +1,80 @@ +package main + +import ( + "encoding/json" + "net/http" + "time" + + "github.com/boramalper/magnetico/pkg/persistence" + "go.uber.org/zap" +) + +func apiTorrentsHandler(w http.ResponseWriter, r *http.Request) { + // @lastOrderedValue AND @lastID are either both supplied or neither of them should be supplied + // at all; and if that is NOT the case, then return an error. + if q := r.URL.Query(); !( + (q.Get("lastOrderedValue") != "" && q.Get("lastID") != "") || + (q.Get("lastOrderedValue") == "" && q.Get("lastID") == "")) { + respondError(w, 400, "`lastOrderedValue`, `lastID` must be supplied altogether, if supplied.") + return + } + + var tq TorrentsQ + if err := decoder.Decode(&tq, r.URL.Query()); err != nil { + respondError(w, 400, "error while parsing the URL: %s", err.Error()) + return + } + + if tq.Query == nil { + tq.Query = new(string) + *tq.Query = "" + } + + if tq.Epoch == nil { + tq.Epoch = new(int64) + *tq.Epoch = time.Now().Unix() // epoch, if not supplied, is NOW. 
+ } else if *tq.Epoch <= 0 { + respondError(w, 400, "epoch must be greater than 0") + return + } + + if tq.LastID != nil && *tq.LastID < 0 { + respondError(w, 400, "lastID has to be greater than or equal to zero") + return + } + + if tq.Ascending == nil { + tq.Ascending = new(bool) + *tq.Ascending = true + } + + torrents, err := database.QueryTorrents( + *tq.Query, *tq.Epoch, persistence.ByRelevance, + *tq.Ascending, N_TORRENTS, tq.LastOrderedValue, tq.LastID) + if err != nil { + respondError(w, 400, "query error: %s", err.Error()) + return + } + + jm, err := json.MarshalIndent(torrents, "", " ") + if err != nil { + respondError(w, 500, "json marshalling error: %s", err.Error()) + return + } + + if _, err = w.Write(jm); err != nil { + zap.L().Warn("couldn't write http.ResponseWriter", zap.Error(err)) + } +} + +func apiTorrentsInfohashHandler(w http.ResponseWriter, r *http.Request) { + +} + +func apiFilesInfohashHandler(w http.ResponseWriter, r *http.Request) { + +} + +func apiStatisticsHandler(w http.ResponseWriter, r *http.Request) { + +} diff --git a/cmd/magneticow/data/static/scripts/mustache-v2.3.0.min.js b/cmd/magneticow/data/static/scripts/mustache-v2.3.0.min.js new file mode 100644 index 0000000..520cfcb --- /dev/null +++ b/cmd/magneticow/data/static/scripts/mustache-v2.3.0.min.js @@ -0,0 +1 @@ +(function defineMustache(global,factory){if(typeof exports==="object"&&exports&&typeof exports.nodeName!=="string"){factory(exports)}else if(typeof define==="function"&&define.amd){define(["exports"],factory)}else{global.Mustache={};factory(global.Mustache)}})(this,function mustacheFactory(mustache){var objectToString=Object.prototype.toString;var isArray=Array.isArray||function isArrayPolyfill(object){return objectToString.call(object)==="[object Array]"};function isFunction(object){return typeof object==="function"}function typeStr(obj){return isArray(obj)?"array":typeof obj}function escapeRegExp(string){return string.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&")}function hasProperty(obj,propName){return obj!=null&&typeof obj==="object"&&propName in obj}var regExpTest=RegExp.prototype.test;function testRegExp(re,string){return regExpTest.call(re,string)}var nonSpaceRe=/\S/;function isWhitespace(string){return!testRegExp(nonSpaceRe,string)}var entityMap={"&":"&","<":"<",">":">",'"':""","'":"'","/":"/","`":"`","=":"="};function escapeHtml(string){return String(string).replace(/[&<>"'`=\/]/g,function fromEntityMap(s){return entityMap[s]})}var whiteRe=/\s*/;var spaceRe=/\s+/;var equalsRe=/\s*=/;var curlyRe=/\s*\}/;var tagRe=/#|\^|\/|>|\{|&|=|!/;function parseTemplate(template,tags){if(!template)return[];var sections=[];var tokens=[];var spaces=[];var hasTag=false;var nonSpace=false;function stripSpace(){if(hasTag&&!nonSpace){while(spaces.length)delete tokens[spaces.pop()]}else{spaces=[]}hasTag=false;nonSpace=false}var openingTagRe,closingTagRe,closingCurlyRe;function compileTags(tagsToCompile){if(typeof tagsToCompile==="string")tagsToCompile=tagsToCompile.split(spaceRe,2);if(!isArray(tagsToCompile)||tagsToCompile.length!==2)throw new Error("Invalid tags: "+tagsToCompile);openingTagRe=new RegExp(escapeRegExp(tagsToCompile[0])+"\\s*");closingTagRe=new RegExp("\\s*"+escapeRegExp(tagsToCompile[1]));closingCurlyRe=new RegExp("\\s*"+escapeRegExp("}"+tagsToCompile[1]))}compileTags(tags||mustache.tags);var scanner=new Scanner(template);var start,type,value,chr,token,openSection;while(!scanner.eos()){start=scanner.pos;value=scanner.scanUntil(openingTagRe);if(value){for(var 
i=0,valueLength=value.length;i0?sections[sections.length-1][4]:nestedTokens;break;default:collector.push(token)}}return nestedTokens}function Scanner(string){this.string=string;this.tail=string;this.pos=0}Scanner.prototype.eos=function eos(){return this.tail===""};Scanner.prototype.scan=function scan(re){var match=this.tail.match(re);if(!match||match.index!==0)return"";var string=match[0];this.tail=this.tail.substring(string.length);this.pos+=string.length;return string};Scanner.prototype.scanUntil=function scanUntil(re){var index=this.tail.search(re),match;switch(index){case-1:match=this.tail;this.tail="";break;case 0:match="";break;default:match=this.tail.substring(0,index);this.tail=this.tail.substring(index)}this.pos+=match.length;return match};function Context(view,parentContext){this.view=view;this.cache={".":this.view};this.parent=parentContext}Context.prototype.push=function push(view){return new Context(view,this)};Context.prototype.lookup=function lookup(name){var cache=this.cache;var value;if(cache.hasOwnProperty(name)){value=cache[name]}else{var context=this,names,index,lookupHit=false;while(context){if(name.indexOf(".")>0){value=context.view;names=name.split(".");index=0;while(value!=null&&index")value=this.renderPartial(token,context,partials,originalTemplate);else if(symbol==="&")value=this.unescapedValue(token,context);else if(symbol==="name")value=this.escapedValue(token,context);else if(symbol==="text")value=this.rawValue(token);if(value!==undefined)buffer+=value}return buffer};Writer.prototype.renderSection=function renderSection(token,context,partials,originalTemplate){var self=this;var buffer="";var value=context.lookup(token[1]);function subRender(template){return self.render(template,context,partials)}if(!value)return;if(isArray(value)){for(var j=0,valueLength=value.length;j 1024); + + return Math.max(fileSizeInBytes, 0.1).toFixed(1) + byteUnits[i]; } diff --git a/cmd/magneticow/data/templates/torrents.html b/cmd/magneticow/data/templates/torrents.html index 96fb6fa..ef58ade 100644 --- a/cmd/magneticow/data/templates/torrents.html +++ b/cmd/magneticow/data/templates/torrents.html @@ -2,25 +2,35 @@ - {{ if .Query }}"{{ .Query }}"{{ else }}Most recent torrents{{ end }} - magneticow + Search - magneticow + - + + + +
magneticow(pre-alpha)
+
- +
@@ -34,22 +44,12 @@ Discovered on - - {{ range .Torrents }} - - - Magnet link - {{ .Name }} - {{ humanizeSize .Size }} - {{ unixTimeToYearMonthDay .DiscoveredOn }} - - {{ end }} + diff --git a/cmd/magneticow/main.go b/cmd/magneticow/main.go index 8e90c35..8bdb1a9 100644 --- a/cmd/magneticow/main.go +++ b/cmd/magneticow/main.go @@ -7,16 +7,12 @@ import ( "log" "net/http" "os" - "strconv" + "strings" "time" - //"strconv" - "strings" - // "time" - "github.com/dustin/go-humanize" - // "github.com/dustin/go-humanize" "github.com/gorilla/mux" + "github.com/gorilla/schema" "go.uber.org/zap" "go.uber.org/zap/zapcore" @@ -25,9 +21,23 @@ import ( const N_TORRENTS = 20 +// Set a Decoder instance as a package global, because it caches +// meta-data about structs, and an instance can be shared safely. +var decoder = schema.NewDecoder() + var templates map[string]*template.Template var database persistence.Database +// ======= Q: Query ======= +type TorrentsQ struct { + Epoch *int64 `schema:"epoch"` + Query *string `schema:"query"` + OrderBy *string `schema:"orderBy"` + Ascending *bool `schema:"ascending"` + LastOrderedValue *float64 `schema:"lastOrderedValue"` + LastID *uint64 `schema:"lastID"` +} + // ========= TD: TemplateData ========= type HomepageTD struct { NTorrents uint @@ -41,7 +51,7 @@ type TorrentsTD struct { SortedBy string NextPageExists bool Epoch int64 - LastOrderedValue uint64 + LastOrderedValue float64 LastID uint64 } @@ -72,12 +82,20 @@ func main() { router := mux.NewRouter() router.HandleFunc("/", rootHandler) + + router.HandleFunc("/api/v0.1/torrents", apiTorrentsHandler) + router.HandleFunc("/api/v0.1/torrents/{infohash:[a-z0-9]{40}}", apiTorrentsInfohashHandler) + router.HandleFunc("/api/v0.1/files/{infohash:[a-z0-9]{40}}", apiFilesInfohashHandler) + router.HandleFunc("/api/v0.1/statistics", apiStatisticsHandler) + router.HandleFunc("/torrents", torrentsHandler) router.HandleFunc("/torrents/{infohash:[a-z0-9]{40}}", torrentsInfohashHandler) router.HandleFunc("/statistics", statisticsHandler) + router.HandleFunc("/feed", feedHandler) + router.PathPrefix("/static").HandlerFunc(staticHandler) - router.HandleFunc("/feed", feedHandler) + templateFunctions := template.FuncMap{ "add": func(augend int, addends int) int { @@ -118,7 +136,7 @@ func main() { templates["homepage"] = template.Must(template.New("homepage").Funcs(templateFunctions).Parse(string(mustAsset("templates/homepage.html")))) // templates["statistics"] = template.Must(template.New("statistics").Parse(string(mustAsset("templates/statistics.html")))) // templates["torrent"] = template.Must(template.New("torrent").Funcs(templateFunctions).Parse(string(mustAsset("templates/torrent.html")))) - templates["torrents"] = template.Must(template.New("torrents").Funcs(templateFunctions).Parse(string(mustAsset("templates/torrents.html")))) + // templates["torrents"] = template.Must(template.New("torrents").Funcs(templateFunctions).Parse(string(mustAsset("templates/torrents.html")))) var err error database, err = persistence.MakeDatabase("sqlite3:///home/bora/.local/share/magneticod/database.sqlite3", logger) @@ -126,8 +144,14 @@ func main() { panic(err.Error()) } + decoder.IgnoreUnknownKeys(false) + decoder.ZeroEmpty(true) + zap.L().Info("magneticow is ready to serve!") - http.ListenAndServe(":8080", router) + err = http.ListenAndServe(":10101", router) + if err != nil { + zap.L().Error("ListenAndServe error", zap.Error(err)) + } } // DONE @@ -136,84 +160,26 @@ func rootHandler(w http.ResponseWriter, r *http.Request) { if err != nil { 
panic(err.Error()) } - templates["homepage"].Execute(w, HomepageTD{ + + err = templates["homepage"].Execute(w, HomepageTD{ NTorrents: nTorrents, }) + if err != nil { + panic(err.Error()) + } } +// TODO: I think there is a standard lib. function for this func respondError(w http.ResponseWriter, statusCode int, format string, a ...interface{}) { w.WriteHeader(statusCode) w.Write([]byte(fmt.Sprintf(format, a...))) } +// TODO: we might as well move torrents.html into static... func torrentsHandler(w http.ResponseWriter, r *http.Request) { - // TODO: Parsing URL Query is tedious and looks stupid... can we do better? - queryValues := r.URL.Query() - - var query string - epoch := time.Now().Unix() // epoch, if not supplied, is NOW. - var lastOrderedValue, lastID *uint64 - - if query = queryValues.Get("query"); query == "" { - respondError(w, 400, "query is missing") - return - } - - if queryValues.Get("epoch") != "" && queryValues.Get("lastOrderedValue") != "" && queryValues.Get("lastID") != "" { - var err error - - epoch, err = strconv.ParseInt(queryValues.Get("epoch"), 10, 64) - if err != nil { - respondError(w, 400, "error while parsing epoch: %s", err.Error()) - return - } - if epoch <= 0 { - respondError(w, 400, "epoch has to be greater than zero") - return - } - - *lastOrderedValue, err = strconv.ParseUint(queryValues.Get("lastOrderedValue"), 10, 64) - if err != nil { - respondError(w, 400, "error while parsing lastOrderedValue: %s", err.Error()) - return - } - if *lastOrderedValue <= 0 { - respondError(w, 400, "lastOrderedValue has to be greater than zero") - return - } - - *lastID, err = strconv.ParseUint(queryValues.Get("lastID"), 10, 64) - if err != nil { - respondError(w, 400, "error while parsing lastID: %s", err.Error()) - return - } - if *lastID <= 0 { - respondError(w, 400, "lastID has to be greater than zero") - return - } - } else if !(queryValues.Get("epoch") == "" && queryValues.Get("lastOrderedValue") == "" && queryValues.Get("lastID") == "") { - respondError(w, 400, "`epoch`, `lastOrderedValue`, `lastID` must be supplied altogether, if supplied.") - return - } - - torrents, err := database.QueryTorrents(query, epoch, persistence.ByRelevance, true, 20, nil, nil) - if err != nil { - respondError(w, 400, "query error: %s", err.Error()) - return - } - - if torrents == nil { - panic("torrents is nil!!!") - } - - templates["torrents"].Execute(w, TorrentsTD{ - CanLoadMore: true, - Query: query, - SubscriptionURL: "borabora", - Torrents: torrents, - SortedBy: "anan", - NextPageExists: true, - }) + data := mustAsset("templates/torrents.html") + w.Header().Set("Content-Type", http.DetectContentType(data)) + w.Write(data) } func torrentsInfohashHandler(w http.ResponseWriter, r *http.Request) { diff --git a/cmd/magneticow/main_test.go b/cmd/magneticow/main_test.go new file mode 100644 index 0000000..e97baff --- /dev/null +++ b/cmd/magneticow/main_test.go @@ -0,0 +1,95 @@ +package main + +import ( + "fmt" + "testing" +) + +type schemaStruct struct { + PString *string `schema:"pstring"` + PUint64 *uint64 `schema:"puint64"` + PBool *bool `schema:"pbool"` + + String string `schema:"string"` + Uint64 uint64 `schema:"uint64"` + Bool bool `schema:"bool"` +} + +type schemaRStruct struct { + Uint64 uint64 `schema:"ruint64,required"` // https://github.com/gorilla/schema/pull/68 +} + +// TestSchemaUnsuppliedNil tests that unsupplied values yield nil. 
+func TestSchemaUnsuppliedNil(t *testing.T) {
+    ss := new(schemaStruct)
+    if err := decoder.Decode(ss, make(map[string][]string)); err != nil {
+        t.Error("decoding error", err.Error())
+    }
+
+    if ss.PString != nil { t.Error("PString is not nil") }
+    if ss.PUint64 != nil { t.Error("PUint64 is not nil") }
+    if ss.PBool != nil { t.Error("PBool is not nil") }
+}
+
+// TestSchemaInvalidUint64 tests that an invalid uint64 value yields a decoding error.
+func TestSchemaInvalidUint64(t *testing.T) {
+    dict := make(map[string][]string)
+    dict["puint64"] = []string{"-1"}
+
+    ss := new(schemaStruct)
+    err := decoder.Decode(ss, dict)
+    if err == nil { t.Error("err is nil") }
+}
+
+// TestSchemaInvalidBool tests that an invalid bool value yields a decoding error.
+func TestSchemaInvalidBool(t *testing.T) {
+    dict := make(map[string][]string)
+    dict["pbool"] = []string{"yyy"}
+
+    ss := new(schemaStruct)
+    err := decoder.Decode(ss, dict)
+    if err == nil { t.Error("err is nil") }
+}
+
+// TestSchemaOverflow tests that integer values greater than the maximum value a field can store
+// lead to decoding errors, rather than silent overflows.
+func TestSchemaOverflow(t *testing.T) {
+    dict := make(map[string][]string)
+    dict["puint64"] = []string{"18446744073709551616"} // 18,446,744,073,709,551,615 + 1
+
+    ss := new(schemaStruct)
+    err := decoder.Decode(ss, dict)
+    if err == nil { t.Error("err is nil") }
+}
+
+// TestSchemaEmptyString tests that an empty string yields nil.
+func TestSchemaEmptyString(t *testing.T) {
+    dict := make(map[string][]string)
+    dict["pstring"] = []string{""}
+
+    ss := new(schemaStruct)
+    if err := decoder.Decode(ss, dict); err != nil {
+        t.Error("decoding error", err.Error())
+    }
+
+    if ss.PString != nil { t.Error("PString is not nil") }
+}
+
+// TestSchemaDefault tests that unsupplied values default to "zero" and do not err.
+func TestSchemaDefault(t *testing.T) {
+    ss := new(schemaStruct)
+    if err := decoder.Decode(ss, make(map[string][]string)); err != nil {
+        t.Error("decoding error", err.Error())
+    }
+
+    if ss.String != "" { t.Error("String is not empty") }
+    if ss.Uint64 != 0 { t.Error("Uint64 is not 0") }
+    if ss.Bool != false { t.Error("Bool is not false") }
+}
+
+func TestSchemaRequired(t *testing.T) {
+    rs := new(schemaRStruct)
+    err := decoder.Decode(rs, make(map[string][]string))
+    if err == nil { t.Fatal("err is nil") }
+    fmt.Println(err.Error())
+}
\ No newline at end of file
diff --git a/pkg/persistence/interface.go b/pkg/persistence/interface.go
index 5439084..9772c53 100644
--- a/pkg/persistence/interface.go
+++ b/pkg/persistence/interface.go
@@ -1,6 +1,8 @@
 package persistence
 
 import (
+    "encoding/hex"
+    "encoding/json"
     "fmt"
     "net/url"
 
@@ -21,13 +23,15 @@ type Database interface {
     // * that match the @query if it's not empty, else all torrents
     // * ordered by the @orderBy in ascending order if @ascending is true, else in descending order
     // after skipping (@page * @pageSize) torrents that also fits the criteria above.
+    //
+    // On error, returns (nil, error), otherwise a non-nil slice of TorrentMetadata and nil.
     QueryTorrents(
         query string,
         epoch int64,
         orderBy orderingCriteria,
         ascending bool,
         limit uint,
-        lastOrderedValue *uint64,
+        lastOrderedValue *float64,
         lastID *uint64,
     ) ([]TorrentMetadata, error)
     // GetTorrents returns the TorrentExtMetadata for the torrent of the given InfoHash. Will return
@@ -67,11 +71,24 @@ type File struct {
 }
 
 type TorrentMetadata struct {
-    InfoHash     []byte
-    Name         string
-    Size         uint64
-    DiscoveredOn int64
-    NFiles       uint
+    ID           uint64  `json:"id"`
+    InfoHash     []byte  `json:"infoHash"` // marshalled differently
+    Name         string  `json:"name"`
+    Size         uint64  `json:"size"`
+    DiscoveredOn int64   `json:"discoveredOn"`
+    NFiles       uint    `json:"nFiles"`
+    Relevance    float64 `json:"relevance"`
+}
+
+func (tm *TorrentMetadata) MarshalJSON() ([]byte, error) {
+    type Alias TorrentMetadata
+    return json.Marshal(&struct {
+        InfoHash string `json:"infoHash"`
+        *Alias
+    }{
+        InfoHash: hex.EncodeToString(tm.InfoHash),
+        Alias:    (*Alias)(tm),
+    })
 }
 
 func MakeDatabase(rawURL string, logger *zap.Logger) (Database, error) {
diff --git a/pkg/persistence/sqlite3.go b/pkg/persistence/sqlite3.go
index f002563..603c853 100644
--- a/pkg/persistence/sqlite3.go
+++ b/pkg/persistence/sqlite3.go
@@ -166,7 +166,7 @@ func (db *sqlite3Database) QueryTorrents(
     orderBy orderingCriteria,
     ascending bool,
     limit uint,
-    lastOrderedValue *uint64,
+    lastOrderedValue *float64,
     lastID *uint64,
 ) ([]TorrentMetadata, error) {
     if query == "" && orderBy == ByRelevance {
@@ -177,15 +177,21 @@ func (db *sqlite3Database) QueryTorrents(
     }
 
     doJoin := query != ""
-    firstPage := true // lastID != nil
+    firstPage := lastID == nil
 
     // executeTemplate is used to prepare the SQL query, WITH PLACEHOLDERS FOR USER INPUT.
     sqlQuery := executeTemplate(`
-    SELECT info_hash
+    SELECT id
+         , info_hash
          , name
          , total_size
          , discovered_on
          , (SELECT COUNT(*) FROM files WHERE torrents.id = files.torrent_id) AS n_files
+    {{ if .DoJoin }}
+         , idx.rank
+    {{ else }}
+         , 0
+    {{ end }}
     FROM torrents
     {{ if .DoJoin }}
         INNER JOIN (
@@ -250,7 +256,16 @@ func (db *sqlite3Database) QueryTorrents(
     torrents := make([]TorrentMetadata, 0)
     for rows.Next() {
         var torrent TorrentMetadata
-        if err = rows.Scan(&torrent.InfoHash, &torrent.Name, &torrent.Size, &torrent.DiscoveredOn, &torrent.NFiles); err != nil {
+        err = rows.Scan(
+            &torrent.ID,
+            &torrent.InfoHash,
+            &torrent.Name,
+            &torrent.Size,
+            &torrent.DiscoveredOn,
+            &torrent.NFiles,
+            &torrent.Relevance,
+        )
+        if err != nil {
             return nil, err
         }
         torrents = append(torrents, torrent)
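
Note: the sketch below is illustrative only and not part of the patch. It shows how a client might call the new /api/v0.1/torrents endpoint registered above, assuming magneticow is running locally on the hard-coded :10101 port from main.go; the `torrent` struct and the "ubuntu" query are hypothetical and simply mirror TorrentMetadata's JSON tags.

    package main

    import (
        "encoding/json"
        "fmt"
        "net/http"
        "net/url"
    )

    // torrent mirrors the JSON shape produced by TorrentMetadata.MarshalJSON:
    // the info hash arrives hex-encoded, and relevance/id are what the client
    // feeds back for pagination.
    type torrent struct {
        ID           uint64  `json:"id"`
        InfoHash     string  `json:"infoHash"`
        Name         string  `json:"name"`
        Size         uint64  `json:"size"`
        DiscoveredOn int64   `json:"discoveredOn"`
        NFiles       uint    `json:"nFiles"`
        Relevance    float64 `json:"relevance"`
    }

    func main() {
        params := url.Values{}
        params.Set("query", "ubuntu") // full-text query; results are relevance-ordered
        // "epoch" is optional and defaults to "now" on the server.
        // For the next page, supply BOTH of these, taken from the last row of the
        // previous response (the handler rejects one without the other), e.g.:
        //   params.Set("lastOrderedValue", "1.5") // relevance of the last row (hypothetical value)
        //   params.Set("lastID", "42")            // id of the last row (hypothetical value)

        resp, err := http.Get("http://localhost:10101/api/v0.1/torrents?" + params.Encode())
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()

        var torrents []torrent // at most N_TORRENTS (20) per page
        if err := json.NewDecoder(resp.Body).Decode(&torrents); err != nil {
            panic(err)
        }
        for _, t := range torrents {
            fmt.Printf("%s  %s (%d bytes)\n", t.InfoHash, t.Name, t.Size)
        }
    }

Pagination here is keyset-style rather than OFFSET-based: the client echoes back the relevance and id of the last row it received as lastOrderedValue and lastID, which is why the SELECT in sqlite3.go now returns id and idx.rank alongside the torrent columns.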