// Less reports whether outline s[i] sorts before s[j].
// Outlines look like "1.1", "1.2", "2.1", "2.1.1".
// With sortByLevelFirst, shallower outlines sort before deeper ones;
// within the same depth, segments are compared numerically.
func (s sortByOutline) Less(i, j int) bool {
	sortByLevelFirst := true
	if sortByLevelFirst {
		lvl1 := strings.Count(s[i].Outline, ".")
		lvl2 := strings.Count(s[j].Outline, ".")
		if lvl1 < lvl2 {
			return true
		}
		if lvl1 > lvl2 {
			return false
		}
	}

	// A pure segment-by-segment number comparison:
	// 1.1, 1.2, 2.1, 2.1.1
	st1 := strings.Split(s[i].Outline, ".")
	st2 := strings.Split(s[j].Outline, ".")
	for idx, v1 := range st1 {
		if idx > len(st2)-1 {
			// i.e. 2.37.2 > 2
			return false
		}
		n1 := util.Stoi(v1)
		n2 := util.Stoi(st2[idx])
		if n1 < n2 {
			return true
		}
		if n1 > n2 {
			return false
		}
	}
	// Every segment of s[i] matched s[j]; s[i] is a shorter or equal prefix,
	// i.e. 2 < 2.26.1.1. Returning false for identical outlines keeps Less strict.
	return len(st1) < len(st2)
}
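// Usage sketch (assumption: Len and Swap for sortByOutline are defined
// elsewhere in this package, since sort.Sort needs the full sort.Interface):
//
//	items := sortByOutline{{Outline: "2.1"}, {Outline: "10"}, {Outline: "2"}}
//	sort.Sort(items)
//	// resulting order: "2", "10", "2.1" - the dot-free outlines compare
//	// numerically (2 < 10), and "2.1" is one level deeper, so it sorts last.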
// backend3 computes layout meta data (columns, rows, discrepancies, sort
// orders) for the blocks in myB0 and renders the backend template.
func backend3(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	c := appengine.NewContext(r)

	var nColsBlock = 4
	if r.FormValue("nColsBlock") != "" {
		nColsBlock = util.Stoi(r.FormValue("nColsBlock"))
	}
	var nColsViewport = 6
	if r.FormValue("nColsViewport") != "" {
		nColsViewport = util.Stoi(r.FormValue("nColsViewport"))
	}

	myB0.VB1 = X
	myB0.NumB1 = len(myB0.VB1)
	myB0.NumB2 = 0
	myB0.NColsViewport = nColsViewport

	// compute basic meta data
	for i1 := range myB0.VB1 {
		myB0.NumB2 += len(myB0.VB1[i1].VB2)
		for i2 := range myB0.VB1[i1].VB2 {
			// number of chars
			ro := myB0.VB1[i1].VB2[i2] // read only
			myB0.VB1[i1].VB2[i2].Size = len(ro.Linktext) + len(ro.Description)
			myB0.VB1[i1].VB2[i2].EditorialIndex = i2
		}
	}

	// compute NCols - NRows for each block
	for i1 := range myB0.VB1 {
		myB0.VB1[i1].NCols = nColsBlock
		if myB0.VB1[i1].NColsEditorial > 0 {
			myB0.VB1[i1].NCols = myB0.VB1[i1].NColsEditorial
		}
		if len(myB0.VB1[i1].VB2) < nColsBlock && len(myB0.VB1[i1].VB2) > 0 {
			myB0.VB1[i1].NCols = len(myB0.VB1[i1].VB2)
		}
		myB0.VB1[i1].NRows = complementRowsOrCols(len(myB0.VB1[i1].VB2), myB0.VB1[i1].NCols)
		myB0.VB1[i1].Discrepancy = myB0.VB1[i1].NCols*myB0.VB1[i1].NRows - len(myB0.VB1[i1].VB2)
		myB0.MaxNCols = util.Max(myB0.MaxNCols, myB0.VB1[i1].NCols)
		myB0.MaxNRows = util.Max(myB0.MaxNRows, myB0.VB1[i1].NRows)
	}

	// compute NCols - NRows - size up to MaxNRows
	for i1 := range myB0.VB1 {
		if myB0.VB1[i1].NRows < myB0.MaxNRows {
			myB0.VB1[i1].NRows = myB0.MaxNRows
			myB0.VB1[i1].NCols = complementRowsOrCols(len(myB0.VB1[i1].VB2), myB0.VB1[i1].NRows)
			myB0.VB1[i1].Discrepancy = myB0.VB1[i1].NCols*myB0.VB1[i1].NRows - len(myB0.VB1[i1].VB2)
		}
	}

	// mark first and last entry of each row
	for i1 := range myB0.VB1 {
		for i2 := range myB0.VB1[i1].VB2 {
			myB0.VB1[i1].VB2[i2].IsFirst = false
			myB0.VB1[i1].VB2[i2].IsLast = false
			if i2%myB0.VB1[i1].NCols == 0 {
				myB0.VB1[i1].VB2[i2].IsFirst = true
			}
			if i2%myB0.VB1[i1].NCols == (myB0.VB1[i1].NCols - 1) {
				myB0.VB1[i1].VB2[i2].IsLast = true
			}
			// aelog.Infof(c, "first-last %v %v \n", i2, i2%myB0.VB1[i1].NCols)
		}
	}

	// create slices with the data to be sorted
	for i1 := range myB0.VB1 {
		sh1 := make([]Order, len(myB0.VB1[i1].VB2))
		myB0.VB1[i1].BySize = ByInt(sh1)
		sh2 := make([]Order, len(myB0.VB1[i1].VB2))
		myB0.VB1[i1].ByHeading = ByStr(sh2)

		// fill in the data - to be sorted later
		for i2 := range myB0.VB1[i1].VB2 {
			ro := myB0.VB1[i1].VB2[i2] // read only
			myB0.VB1[i1].BySize[i2].IdxSrc = i2
			myB0.VB1[i1].BySize[i2].ByI = len(ro.Linktext) + len(ro.Description)
			myB0.VB1[i1].ByHeading[i2].IdxSrc = i2
			myB0.VB1[i1].ByHeading[i2].ByS = strings.ToLower(ro.Linktext)
		}
	}

	// actual sorting of the sort-helper slices
	for i1 := range myB0.VB1 {
		sort.Sort(myB0.VB1[i1].BySize)
		sort.Sort(myB0.VB1[i1].ByHeading)
		aelog.Infof(c, "-- Sorting %v", myB0.VB1[i1].Heading)
		// for i, v := range myB0.VB1[i1].BySize {
		// 	aelog.Infof(c, "---- %v %v %v", i, v.IdxSrc, v.ByI)
		// }
		// for i, v := range myB0.VB1[i1].ByHeading {
		// 	aelog.Infof(c, "---- %v %v %v", i, v.IdxSrc, v.ByS)
		// }
	}

	path := m["dir"].(string) + m["base"].(string)
	cntr, _ := sc.Count(c, path)

	add, tplExec := tplx.FuncTplBuilder(w, r)
	add("n_html_title", "Backend", nil)
	add("n_cont_0", "<style>"+htmlfrag.CSSColumnsWidth(nColsViewport)+"</style>", "")
	add("n_cont_1", tplx.PrefixLff+"backend3_body", myB0)
	add("tpl_legend", tplx.PrefixLff+"backend3_body_embed01", "")
	add("n_cont_2", "<p>{{.}} views</p>", cntr)
	sDumped := ""
	// sDumped = spew.Sdump(myB0)
	add("n_cont_3", "<pre>{{.}} </pre>", sDumped)
	tplExec(w, r)
}
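// complementRowsOrCols is used above to derive the missing grid dimension
// (rows from columns, or columns from rows). Its implementation is not shown
// here; a plausible sketch - purely an assumption - is a ceiling division:
//
//	func complementRowsOrCols(numItems, givenDim int) int {
//		if givenDim < 1 {
//			return numItems
//		}
//		return (numItems + givenDim - 1) / givenDim // round up
//	}
//
// With 10 items and NCols=4 this yields NRows=3, and the Discrepancy field
// above would then record the 4*3-10 = 2 empty cells.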
// foscamToggle reads the current motion detection config from the camera,
// flips IsEnable, resets all detection areas to 255 and writes the config back.
func foscamToggle(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	htmlfrag.SetNocacheHeaders(w)
	w.Header().Set("Content-Type", "text/html; charset=utf-8")

	ssecs := r.FormValue("sleep")
	if ssecs != "" {
		secs := util.Stoi(ssecs)
		wpf(w, "sleeping %v secs ... <br><br>\n", secs)
		time.Sleep(time.Duration(secs) * time.Second)
	}

	prevStat := makeRequest(w, r, path_get_alarm)

	wpf(w, "||%s||<br>\n", prevStat.IsEnable)
	if strings.TrimSpace(prevStat.IsEnable) == "0" {
		prevStat.IsEnable = "1"
	} else {
		prevStat.IsEnable = "0"
	}

	prevStat.Area0 = "255"
	prevStat.Area1 = "255"
	prevStat.Area2 = "255"
	prevStat.Area3 = "255"
	prevStat.Area4 = "255"
	prevStat.Area5 = "255"
	prevStat.Area6 = "255"
	prevStat.Area7 = "255"
	prevStat.Area8 = "255"
	prevStat.Area9 = "255"

	// ugly: dump the struct and rewrite it into a query string
	s2 := spf("%+v", prevStat)
	s2 = strings.Trim(s2, "{}")
	s2 = strings.Replace(s2, ":", "=", -1)
	s2 = strings.Replace(s2, " ", "&", -1)

	// even worse: we have to lower-case the first letter of each key again
	pairs := strings.Split(s2, "&")
	recombined := ""
	for i, v := range pairs {
		fchar := v[:1]
		fchar = strings.ToLower(fchar)
		recombined += fchar + v[1:]
		if i < len(pairs)-1 {
			recombined += "&"
		}
	}

	wpf(w, "<pre>")
	// disS2 := stringspb.Breaker(s2, 50)
	// for _, v := range disS2 {
	// 	wpf(w, "%v\n", v)
	// }
	disRecombined := stringspb.Breaker(recombined, 50)
	for _, v := range disRecombined {
		wpf(w, "%v\n", v)
	}
	wpf(w, "</pre>")
	// wpf(w, "<pre>%v</pre>\n", recombined)

	toggleRes := makeRequest(w, r, path_set_alarm+"&"+recombined)
	if toggleRes.Result == "0" {
		wpf(w, "<br>end foscam toggle - success<br>\n")
		// prevStat.IsEnable now holds the *new* state
		if prevStat.IsEnable == "0" {
			wpf(w, "<b>DISabled</b><br>\n")
		} else {
			wpf(w, "<b>ENabled</b><br>\n")
		}
	}
}
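// The string surgery above (dumping the struct via %+v and rewriting it into
// key=value pairs) could alternatively be expressed with the standard
// net/url package - a hedged sketch, assuming the camera CGI accepts
// percent-encoded and alphabetically ordered parameters:
//
//	vals := url.Values{}
//	vals.Set("isEnable", prevStat.IsEnable)
//	vals.Set("area0", prevStat.Area0)
//	// ... remaining area1..area9 fields ...
//	query := vals.Encode() // e.g. "area0=255&...&isEnable=1"
//	toggleRes := makeRequest(w, r, path_set_alarm+"&"+query)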
// FetchAndDecodeJSON requests similar articles from the FetchSimilar endpoint
// and decodes the returned JSON map into a slice of repo.FullArticle.
func FetchAndDecodeJSON(r *http.Request, surl, knownProtocol string, lg loghttp.FuncBufUniv, fs fsi.FileSystem) []repo.FullArticle {

	fullURL := fmt.Sprintf("%s%s?%s=%s&cnt=%v&prot=%v", routes.AppHost(), routes.FetchSimilarURI,
		routes.URLParamKey, surl, numTotal-1, knownProtocol)
	// fullURL = fmt.Sprintf("%s%s?%s=%s&cnt=%v", r.URL.Host, repo.routes.FetchSimilarURI,
	// 	routes.URLParamKey, surl, numTotal-1)
	lg("lo fetching %v", fullURL)
	start := time.Now()

	fo := fetch.Options{}
	fo.URL = fullURL
	bJSON, inf, err := fetch.UrlGetter(r, fo)
	_ = inf
	lg(err)
	if err != nil {
		lg("msg %v", inf.Msg)
		return nil
	}
	if len(bJSON) == 0 {
		lg("empty bJSON")
		return nil
	}
	lg("\t\tfetch resp complete after %4.2v secs; %vkB", time.Since(start).Seconds(), len(bJSON)/1024)

	var mp map[string][]byte
	err = json.Unmarshal(bJSON, &mp)
	lg(err)
	if err != nil {
		if _, ok := mp["msg"]; ok {
			lg("%s", mp["msg"])
		} else {
			lg("%s", bJSON)
		}
		return nil
	}

	smaxFound := string(mp["lensimilar"])
	maxFound := util.Stoi(smaxFound)
	if maxFound < numTotal-1 {
		lg("not enough files returned by FetchSimilar 1 - mp[lensimilar] too small: %s", mp["lensimilar"])
		return nil
	}
	least3Files := make([]repo.FullArticle, maxFound+1)

	_, ok1 := mp["url_self"]
	_, ok2 := mp["mod_self"]
	_, ok3 := mp["bod_self"]
	if ok1 && ok2 && ok3 {
		least3Files[0].Url = string(mp["url_self"])
		least3Files[0].Mod, err = time.Parse(http.TimeFormat, string(mp["mod_self"]))
		lg(err)
		least3Files[0].Body = mp["bod_self"]
		if len(least3Files[0].Body) < 200 {
			if !bytes.Contains(least3Files[0].Body, []byte(fetch.MsgNoRdirects)) {
				lg("found base but it's a redirect")
				return nil
			}
		}
	}
	lg("found base")

	for k, v := range mp {
		if k == "msg" {
			continue
		}
		if strings.HasSuffix(k, "self") {
			continue
		}

		if strings.HasPrefix(k, "url__") {
			sval := strings.TrimPrefix(k, "url__")
			val := util.Stoi(sval)
			// lg("%v %v %s", sval, val, v)
			least3Files[val+1].Url = string(v)
		}
		if strings.HasPrefix(k, "mod__") {
			sval := strings.TrimPrefix(k, "mod__")
			val := util.Stoi(sval)
			// lg("%v %v %s", sval, val, v)
			least3Files[val+1].Mod, err = time.Parse(http.TimeFormat, string(v))
			lg(err)
		}
		if strings.HasPrefix(k, "bod__") {
			sval := strings.TrimPrefix(k, "bod__")
			val := util.Stoi(sval)
			least3Files[val+1].Body = v // html.EscapeString(string(v))
		}
	}

	lg("found %v similar; decoding complete after %4.2v secs", maxFound, time.Since(start).Seconds())

	for _, v := range least3Files {
		lg("%v %v", v.Url, len(v.Body))
	}

	return least3Files
}
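// For orientation: the keys read above imply that the FetchSimilar response,
// decoded into map[string][]byte, is shaped roughly like this (the []byte
// values arrive base64-encoded in the JSON wire format, as encoding/json
// produces for byte slices; the concrete values are illustrative only):
//
//	{
//	  "lensimilar": "...",
//	  "url_self": "...", "mod_self": "...", "bod_self": "...",
//	  "url__0":   "...", "mod__0":   "...", "bod__0":   "...",
//	  "url__1":   "...", "mod__1":   "...", "bod__1":   "..."
//	}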
// McacheGet is our universal memcache retriever.
// Both scalars and structs are returned.
//
// Sadly, structs can only be cast into an *existing* object of the desired type.
// There is no way to create an object of the desired type dynamically and return it.
// Therefore we need a pre-created object as argument to return into.
//
// Even for scalar values, the argument moldForReturn is required
// to indicate the scalar or struct type.
//
// In addition, a returned value of type interface{} would have to be
// cumbersomely cast by the caller - thus a return value solution is always
// worse than simply passing a pre-created argument.
//
// For scalar values, the package has the types WrapString and WrapInt.
//
// Todo: WrapString, WrapInt could be saved without JSON.
func McacheGet(c appengine.Context, skey string, moldForReturn interface{}) bool {

	tMold := reflect.TypeOf(moldForReturn)
	stMold := tMold.Name()                    // strangely this is empty (it is, for pointer types)
	stMold = fmt.Sprintf("%T", moldForReturn) // unlike this
	msg1 := fmt.Sprintf("mcache requ type %s - key %v", stMold, skey)

	if stMold == "string" || stMold == "int" ||
		stMold == "*dsu.WrapInt" || stMold == "*dsu.WrapString" {

		c.Infof("%s %s", "scalar", msg1)
		miGet, err := memcache.Get(c, skey)
		if err != nil && err != memcache.ErrCacheMiss {
			panic(err)
		}
		if err == memcache.ErrCacheMiss {
			return false
		}

		// var rval interface{}
		if stMold == "int" {
			panic("use wrappers")
			// rval = util.Stoi(string(miGet.Value))
		}
		if stMold == "string" {
			// rval = string(miGet.Value)
			panic("use wrappers")
		}
		if stMold == "*dsu.WrapInt" {
			tmp := moldForReturn.(*WrapInt)
			tmp.I = util.Stoi(string(miGet.Value))
		}
		if stMold == "*dsu.WrapString" {
			tmp := moldForReturn.(*WrapString)
			tmp.S = string(miGet.Value)
		}
		c.Infof("  mcache got scalar - key %v %v", skey, moldForReturn)
		return true
	}

	c.Infof("%s %s", "objct", msg1)
	unparsedjson, err := memcache.JSON.Get(c, skey, &moldForReturn)
	_ = unparsedjson
	if err != nil && err != memcache.ErrCacheMiss {
		panic(err)
	}
	if err == memcache.ErrCacheMiss {
		return false
	}
	c.Infof("  mcache got obj - key %v", skey)
	return true
}
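// Usage sketch for McacheGet (the keys and the profile struct below are
// hypothetical, for illustration only):
//
//	// scalar via wrapper type
//	wi := new(WrapInt)
//	if McacheGet(c, "visit-count", wi) {
//		c.Infof("cached count: %v", wi.I)
//	}
//
//	// struct: pass a pointer to a pre-created value of the desired type;
//	// it is filled in place from the JSON-encoded cache entry
//	type profile struct{ Name string }
//	p := profile{}
//	if McacheGet(c, "profile-xyz", &p) {
//		c.Infof("cached profile: %v", p.Name)
//	}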