func runTestX( w http.ResponseWriter, r *http.Request, f1 func() string, f2 func(fsi.FileSystem) (*bytes.Buffer, string), ) { wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Run a test"})) defer wpf(w, tplx.Foot) wpf(w, "<pre>\n") defer wpf(w, "\n</pre>") if f1 == nil { f1 = dsfs.MountPointLast } mnt := f1() fs := getFS(appengine.NewContext(r), mnt) bb := new(bytes.Buffer) msg := "" wpf(bb, "created fs %v\n\n", mnt) bb, msg = f2(fs) w.Write([]byte(msg)) w.Write([]byte("\n\n")) w.Write(bb.Bytes()) }
// FetchHTML executes the fetch commands.
// It creates the configured filesystem, seeds it with a message file and a
// test directory, and calls the RSS fetcher once per command in fcs.
func FetchHTML(w http.ResponseWriter, r *http.Request, fcs []FetchCommand) {
	lg, lge := loghttp.Logger(w, r)
	var err error
	fs := GetFS(appengine.NewContext(r))
	// fs = fsi.FileSystem(memMapFileSys)

	wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Requesting files"}))
	defer wpf(w, tplx.Foot)
	wpf(w, "<pre>")
	defer wpf(w, "</pre>")

	// Seed the docroot; msg is presumably a package-level []byte — TODO confirm.
	err = fs.WriteFile(path.Join(docRoot, "msg.html"), msg, 0644)
	lge(err)
	// err = fs.WriteFile(path.Join(docRoot, "index.html"), []byte("content of index.html"), 0644)
	// lge(err)
	err = fs.MkdirAll(path.Join(docRoot, "testDirX/testDirY"), 0755)
	lge(err)

	for _, config := range fcs {
		FetchUsingRSS(w, r, fs, config)
	}
	lg("fetching complete")
}
func setFSType(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Set filesystem type"})) defer wpf(w, tplx.Foot) stp := r.FormValue("type") newTp, err := strconv.Atoi(stp) if err == nil && newTp >= 0 && newTp <= 2 { whichType = newTp wpf(w, "new type: %v<br><br>\n", whichType) } if whichType != 0 { wpf(w, "<a href='%v?type=0' >dsfs</a><br>\n", UriSetFSType) } else { wpf(w, "<b>dsfs</b><br>\n") } if whichType != 1 { wpf(w, "<a href='%v?type=1' >osfs</a><br>\n", UriSetFSType) } else { wpf(w, "<b>osfs</b><br>\n") } if whichType != 2 { wpf(w, "<a href='%v?type=2' >memfs</a><br>\n", UriSetFSType) } else { wpf(w, "<b>memfs</b><br>\n") } }
// HandleHomeVari renders an overview page for the identity-toolkit flows:
// signin widget, success landing, signout, signout landing, and the
// account-chooser branding page. The two landing URLs are caller-supplied.
func HandleHomeVari(w http.ResponseWriter, r *http.Request, successLandingURL, signoutLandingURL string) {
	format := ` <a href='%v?mode=select'>Signin with Redirect (Widget)</a><br><br> <a href='%v'>Signin Success Landing</a><br><br> <a href='%v'>Signout </a><br><br> <a href='%v'>Signout Landing</a><br> <a href='%v'>Branding for Account Chooser</a><br> `
	str := fmt.Sprintf(format,
		WidgetSigninAuthorizedRedirectURL,
		successLandingURL,
		signOutURL,
		signoutLandingURL,
		accountChooserBrandingURL,
	)
	bstpl := tplx.TemplateFromHugoPage(w, r) // the jQuery irritates
	fmt.Fprintf(w, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		"HtmlTitle":       "Google Identity Toolkit Overview",
		"HtmlDescription": "", // reminder
		"HtmlContent":     template.HTML(str),
	}))
}
// GetHomeTpl builds the member-area home template: it renders the Hugo
// base page with the given title and body into a buffer and re-parses the
// result as a new template named "home".
// An empty body defaults to the ID card plus user info fragments.
func GetHomeTpl(w http.ResponseWriter, r *http.Request, title, body string) *template.Template {
	if body == "" {
		body = IDCardHTML + UserInfoHTML
	}
	lg, _ := loghttp.BuffLoggerUniversal(w, r)
	bstpl := tplx.TemplateFromHugoPage(w, r)
	b := new(bytes.Buffer)
	fmt.Fprintf(b, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		// "HtmlTitle": "{{ .HtmlTitle }}", // this seems to cause problems sometimes.
		"HtmlTitle":       title,
		"HtmlDescription": "", // reminder
		"HtmlHeaders":     template.HTML(Headers),
		"HtmlContent":     template.HTML(body),
	}))
	intHomeTemplate, err := template.New("home").Parse(b.String())
	lg(err) // parse failure is only logged; a possibly nil template is returned — TODO confirm callers handle this
	return intHomeTemplate
}
func resetMountPoint(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Mountpoint reset"})) defer wpf(w, tplx.Foot) wpf(w, "<pre>\n") defer wpf(w, "\n</pre>") wpf(w, "reset %v\n", dsfs.MountPointReset()) }
func decrMountPoint(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Mountpoint decrement"})) defer wpf(w, tplx.Foot) wpf(w, "<pre>\n") defer wpf(w, "\n</pre>") wpf(w, "counted down %v\n", dsfs.MountPointDecr()) }
func deleteAll(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { lg, _ := loghttp.BuffLoggerUniversal(w, r) err := r.ParseForm() lg(err) wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Delete all filesystem data"})) defer wpf(w, tplx.Foot) confirm := r.FormValue("confirm") if confirm != "yes" { wpf(w, "All dsfs contents are deletes. All memfs contents are deleted<br>\n") wpf(w, "Put a get param into the URL ?confirm - and set it to 'yes'<br>\n") wpf(w, "Put a get param 'mountname' into url; i.e. mountname=mntftch<br>\n") return } wpf(w, "<pre>\n") defer wpf(w, "\n</pre>") // // fs := dsfs.New( dsfs.AeContext(appengine.NewContext(r)), ) mountName := r.FormValue("mountname") if mountName != "" { wpf(w, "mountame = "+mountName+"\n") fs = dsfs.New( dsfs.AeContext(appengine.NewContext(r)), dsfs.MountName(mountName), ) } wpf(w, "dsfs:\n") msg, err := fs.DeleteAll() if err != nil { wpf(w, "err during delete %v\n", err) } wpf(w, msg) memMapFileSys = memfs.New() wpf(w, "\n") wpf(w, "memMapFs new") // cleanup must be manual osFileSys = osfs.New() }
func incrMountPoint(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Mountpoint increment"})) defer wpf(w, "\n</pre>") wpf(w, "<pre>\n") defer wpf(w, tplx.Foot) xx := r.Header.Get("adapter_01") wpf(w, "adapter set %q\n", xx) wpf(w, "counted up %v\n", dsfs.MountPointIncr()) }
func sendUpload(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { lg, _ := loghttp.Logger(w, r) // c := appengine.NewContext(r) wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Post an Upload"})) defer wpf(w, tplx.Foot) tData := map[string]string{"Url": UrlUploadReceive} err := tplBase.ExecuteTemplate(w, "tplName01", tData) if err != nil { lg("tpl did not compile: %v", err) } }
// DeleteSubtree removes a directory subtree from the current filesystem.
// GET renders a confirmation form; POST performs the removal.
// Form values: "mountname" (defaults to the last dsfs mount point) and
// "pathprefix" (defaults to "impossible-value" so nothing is removed
// unless explicitly supplied). Memcache is flushed afterwards.
func DeleteSubtree(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	lg, lge := loghttp.Logger(w, r)
	err := r.ParseForm()
	lge(err)
	wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Delete Subtree for curr FS"}))
	defer wpf(w, tplx.Foot)
	if r.Method == "POST" {
		wpf(w, "<pre>\n")
		defer wpf(w, "\n</pre>")
		mountPoint := dsfs.MountPointLast()
		if len(r.FormValue("mountname")) > 0 {
			mountPoint = r.FormValue("mountname")
		}
		lg("mount point is %v", mountPoint)
		pathPrefix := "impossible-value"
		if len(r.FormValue("pathprefix")) > 0 {
			pathPrefix = r.FormValue("pathprefix")
		}
		lg("pathprefix is %v", pathPrefix)
		fs := getFS(appengine.NewContext(r), mountPoint)
		lg("created fs %v-%v ", fs.Name(), fs.String())
		lg("removing %q - and its subtree ...", pathPrefix)
		err := fs.RemoveAll(pathPrefix)
		lge(err)
		// Drop cached entries so stale reads don't survive the delete.
		errMc := memcache.Flush(appengine.NewContext(r))
		lge(errMc)
		if err == nil && errMc == nil {
			lg("success")
		}
	} else {
		// Non-POST: render the confirmation form posting back to this handler.
		tData := map[string]string{"Url": UriDeleteSubtree}
		err := tplBase.ExecuteTemplate(w, "tplName01", tData)
		lge(err)
	}
}
// handleSignOutLanding renders the page shown after a completed sign-out:
// a short confirmation plus a link back to the home URL.
func handleSignOutLanding(w http.ResponseWriter, r *http.Request) {
	format := ` Signed out<br> <a href='%v'>Home</a><br> `
	str := fmt.Sprintf(format, homeURL)
	bstpl := tplx.TemplateFromHugoPage(w, r) // the jQuery irritates
	fmt.Fprintf(w, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		"HtmlTitle":       "Google Identity Toolkit Overview",
		"HtmlDescription": "", // reminder
		"HtmlContent":     template.HTML(str),
	}))
}
// Submit test commands by http posting them. func staticFetchViaPosting2Receiver(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { lg, lge := loghttp.Logger(w, r) wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "JSON Post"})) defer wpf(w, tplx.Foot) wpf(w, "<pre>") defer wpf(w, "</pre>") b, err := Post2Receiver(r, testCommands) lge(err) lg("msg from Post2Receiver:") lg(b.String()) }
func GetIDCardTpl(w http.ResponseWriter, r *http.Request, u *User, argSignoutURL string, argRedirectSuccess string) string { b := new(bytes.Buffer) if argRedirectSuccess != "" { argRedirectSuccess = "?mode=select&user=wasNil&red=" + argRedirectSuccess } fmt.Fprintf(b, tplx.ExecTplHelper(IDCardHTML, map[string]interface{}{ "WidgetURL": WidgetSigninAuthorizedRedirectURL + argRedirectSuccess, "SignOutURL": argSignoutURL, "User": u, // "CookieDump": template.HTML(htmlfrag.CookieDump(r)), })) return b.String() }
// getHomeTpl builds the member-area home page template by rendering the
// Hugo base page with the two home fragments and re-parsing the result
// as a template named "home".
func getHomeTpl(w http.ResponseWriter, r *http.Request) *template.Template {
	lg, _ := loghttp.BuffLoggerUniversal(w, r)
	bstpl := tplx.TemplateFromHugoPage(w, r)
	b := new(bytes.Buffer)
	fmt.Fprintf(b, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		// "HtmlTitle": "{{ .HtmlTitle }}", // this seems to cause problems sometimes.
		"HtmlTitle":       "Member Area",
		"HtmlDescription": "", // reminder
		"HtmlHeaders":     template.HTML(Headers),
		"HtmlContent":     template.HTML(home1 + "\n<br><br>\n" + home2),
	}))
	intHomeTemplate, err := template.New("home").Parse(b.String())
	lg(err) // parse failure is only logged; a possibly nil template is returned
	return intHomeTemplate
}
// GetWidgetTpl builds the signin-widget page template: it renders the
// Hugo base page with the widget HTML fragment and re-parses the result
// as a template named "widg".
func GetWidgetTpl(w http.ResponseWriter, r *http.Request, title string) *template.Template {
	lg, _ := loghttp.BuffLoggerUniversal(w, r)
	bstpl := tplx.TemplateFromHugoPage(w, r) // the jQuery irritates
	// bstpl := tplx.HugoTplNoScript
	b := new(bytes.Buffer)
	fmt.Fprintf(b, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		// "HtmlTitle": "{{ .HtmlTitle }}", // it DOES cause some eternal loop. But why only here?
		"HtmlTitle":       title,
		"HtmlDescription": "", // reminder
		"HtmlHeaders":     template.HTML(Headers),
		"HtmlContent":     template.HTML(widgetHTML),
	}))
	intGitkitTemplate, err := template.New("widg").Parse(b.String())
	lg(err) // parse failure is only logged; a possibly nil template is returned
	return intGitkitTemplate
}
// confirmPay handles a payment-confirmation callback (e.g. from a bitcoin
// payment processor). All output is buffered and flushed to the response
// on return; the handler logs the "customsecret" and "value" parameters.
func confirmPay(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	// Example of an incoming callback request:
	/* http://abc.de/ef?input_transaction_hash=46178baf7de078954b5aebb71c12120b33d998faac1c165af195eae90f19b25c&shared=false&address=18tpXf8WWuhJP95JbDASbZvavmZJbrydut&destination_address=18tpXf8WWuhJP95JbDASbZvavmZJbrydut&input_address=1ZTnjSdknZvur9Gc73gvB8XBTWL7nV1m6&test=true&anonymous=false&confirmations=0&value=82493362&transaction_hash=46178baf7de078954b5aebb71c12120b33d998faac1c165af195eae90f19b25c */
	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored, — the closure reads b at flush time
	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Payment confirmation"}))
	defer wpf(b, tplx.Foot)
	wpf(b, "<pre>")
	defer wpf(b, "</pre>")

	err := r.ParseForm()
	lg(err)

	custSecret := ""
	if r.FormValue("customsecret") != "" {
		custSecret = r.FormValue("customsecret")
	}
	lg("custom secret is %q", custSecret)

	val := ""
	if r.FormValue("value") != "" {
		val = r.FormValue("value")
	}
	lg("value is %q", val)
}
// foscamWatch serves a pseudo live view of a Foscam camera: an <img> tag
// whose JavaScript reloads a fresh snapshot every second (with a 5 second
// backoff on error), since the camera exposes no usable video stream.
func foscamWatch(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	htmlfrag.SetNocacheHeaders(w)
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Foscam live watch"}))
	/* There is no way to access a real video stream. Thus we use this suggestion: http://foscam.us/forum/post43654.html#p43654 */
	str := `<img width='640' src="http://` + dns_cam + `/CGIProxy.fcgi?cmd=snapPicture2&usr=visitor&pwd=visitor&t=" onload='setTimeout(function() {src = src.substring(0, (src.lastIndexOf("t=")+2))+(new Date()).getTime()}, 1000)' onerror='setTimeout(function() {src = src.substring(0, (src.lastIndexOf("t=")+2))+(new Date()).getTime()}, 5000)' alt='' />`
	w.Write([]byte(str))
	w.Write([]byte(tplx.Foot))
}
// FetchSimilar is an extended version of Fetch
// It is uses a DirTree of crawled *links*, not actual files.
// As it moves up the DOM, it crawls every document for additional links.
// It first moves up to find similar URLs on the same depth
//       /\
//      /\ \
//     /\ \ \
// It then moves up the ladder again - to accept higher URLs
//       /\
//      /\
//     /\
func FetchSimilar(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,
	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	start := time.Now()

	wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Find similar HTML URLs"}))
	defer wpf(b, tplx.Foot)
	wpf(b, "<pre>")
	defer wpf(b, "</pre>")

	fs1 := GetFS(appengine.NewContext(r))

	err := r.ParseForm()
	lg(err)

	// Number of similar documents requested ("cnt"); defaults to 3.
	countSimilar := 3
	sCountSimilar := r.FormValue("cnt")
	if sCountSimilar != "" {
		i, err := strconv.Atoi(strings.TrimSpace(sCountSimilar))
		if err == nil {
			countSimilar = i
		}
	}

	// The reference URL whose similars are sought.
	surl := r.FormValue(routes.URLParamKey)
	ourl, err := fetch.URLFromString(surl)
	lg(err)
	if err != nil {
		return
	}
	if ourl.Host == "" {
		lg("host is empty (%v)", surl)
		return
	}

	// Optional protocol override ("prot").
	knownProtocol := ""
	if r.FormValue("prot") != "" {
		knownProtocol = r.FormValue("prot")
	}

	// Optional worker-pool size override ("numworkers"); 0 = derive from demand.
	numWorkers := 0
	sNumWorkers := r.FormValue("numworkers")
	if sNumWorkers != "" {
		i, err := strconv.Atoi(strings.TrimSpace(sNumWorkers))
		if err == nil {
			numWorkers = i
		}
	}

	// Directory depth of the source path; bounds the ladder walk below.
	srcDepth := strings.Count(ourl.Path, "/")

	cmd := FetchCommand{}
	cmd.Host = ourl.Host
	cmd.SearchPrefix = ourl.Path
	cmd = addDefaults(cmd)

	// In-memory tree of crawled links, warmed from the saved digest.
	dirTree := &DirTree{Name: "/", Dirs: map[string]DirTree{}, EndPoint: true}
	fnDigest := path.Join(docRoot, cmd.Host, "digest2.json")
	loadDigest(w, r, lg, fs1, fnDigest, dirTree) // previous

	lg("dirtree 400 chars is %v end of dirtree\t\t", stringspb.ToLen(dirTree.String(), 400))

	// Fetch (or reuse) the source document itself; harvest its links.
	m1 := new(MyWorker)
	m1.r = r
	m1.lg = lg
	m1.fs1 = fs1
	m1.SURL = path.Join(cmd.Host, ourl.Path)
	m1.Protocol = knownProtocol

	btsSrc, modSrc, usedExisting, err := fetchSave(m1)
	if !usedExisting {
		addAnchors(lg, cmd.Host, btsSrc, dirTree)
	}
	lg(err)
	if err != nil {
		return
	}

	lg("\t\t%4.2v secs so far 1", time.Now().Sub(start).Seconds())

	var treePath string
	treePath = "/blogs/freeexchange" // dead assignments kept from development; only the last one counts
	treePath = "/news/europe"
	treePath = path.Dir(ourl.Path)

	opt := LevelWiseDeeperOptions{}
	opt.Rump = treePath
	opt.ExcludeDir = "/news/americas" // dead assignments; only the last ExcludeDir applies
	opt.ExcludeDir = "/blogs/buttonwood"
	opt.ExcludeDir = "/something-impossible"
	opt.MinDepthDiff = 1
	opt.MaxDepthDiff = 1
	opt.CondenseTrailingDirs = cmd.CondenseTrailingDirs
	opt.MaxNumber = cmd.DesiredNumber + 1  // one more for "self"
	opt.MaxNumber = cmd.DesiredNumber + 40 // collect more, 'cause we filter out those too old later

	var subtree *DirTree
	links := []FullArticle{}
	alreadyCrawled := map[string]struct{}{}

	// Ladder walk: j relaxes the accepted height difference, i dives deeper.
MarkOuter:
	for j := 0; j < srcDepth; j++ {
		treePath = path.Dir(ourl.Path)
	MarkInner:
		// for i := 1; i < srcDepth; i++ {
		for i := 1; i < (srcDepth + 5); i++ {
			subtree, treePath = DiveToDeepestMatch(dirTree, treePath)

			lg("Looking from height %v to level %v - %v", srcDepth-i, srcDepth-j, treePath)

			if _, ok := alreadyCrawled[treePath]; ok {
				// lg("\t already digested %v", treePath)
				continue
			}

			// Crawl the intermediate directory page for more links.
			m2 := new(MyWorker)
			m2.r = r
			m2.lg = lg
			m2.fs1 = fs1
			m2.SURL = path.Join(cmd.Host, treePath)
			m2.Protocol = knownProtocol

			btsPar, _, usedExisting, err := fetchSave(m2)
			lg(err)
			if err != nil {
				return
			}
			alreadyCrawled[treePath] = struct{}{}

			if !usedExisting {
				addAnchors(lg, cmd.Host, btsPar, dirTree)
			}

			if subtree == nil {
				lg("\n#%v treePath %q ; subtree is nil", i, treePath)
			} else {
				// lg("\n#%v treePath %q ; subtree exists", i, treePath)
				opt.Rump = treePath
				opt.MinDepthDiff = i - j
				opt.MaxDepthDiff = i - j
				lvlLinks := LevelWiseDeeper(nil, nil, subtree, opt)
				links = append(links, lvlLinks...)
				for _, art := range lvlLinks {
					_ = art
					// lg("#%v fnd %v", i, stringspb.ToLen(art.Url, 100))
				}

				if len(links) >= opt.MaxNumber {
					lg("found enough links")
					break MarkOuter
				}

				pathPrev := treePath
				treePath = path.Dir(treePath)
				// lg("#%v bef %v - aft %v", i, pathPrev, treePath)

				// Stop climbing once the path cannot be shortened further.
				if pathPrev == "." && treePath == "." ||
					pathPrev == "/" && treePath == "/" ||
					pathPrev == "" && treePath == "." {
					lg("break to innner")
					break MarkInner
				}
			}
		}
	}

	lg("%v links after %4.2v secs", len(links), time.Now().Sub(start).Seconds())
	lg("============================")
	lg("Now reading/fetching actual similar files - not just the links")

	tried := 0
	selecteds := []FullArticle{}
	nonExisting := []FullArticle{}
	nonExistFetched := []FullArticle{}

	// First pass: reuse sufficiently fresh (<10h) files already in the store.
	for _, art := range links {
		if art.Url == ourl.Path {
			lg("skipping self\t%v", art.Url)
			continue
		}

		tried++
		useExisting := false

		semanticUri := condenseTrailingDir(art.Url, cmd.CondenseTrailingDirs)
		p := path.Join(docRoot, cmd.Host, semanticUri)
		f, err := fs1.Open(p)
		// lg(err) // its no error if file does not exist
		if err != nil {
			// lg("!nstore %q", semanticUri)
		} else {
			// lg("reading %q", semanticUri)
			// lets put this into a func, so that f.close it called at the end of this func
			// otherwise defer f.close() spans the entire func and prevents
			// overwrites chmods further down
			f := func() {
				defer f.Close()
				fi, err := f.Stat()
				lg(err)
				if err != nil {
				} else {
					age := time.Now().Sub(fi.ModTime())
					if age.Hours() < 10 {
						lg("\t\tusing existing file with age %4.2v hrs", age.Hours())
						art.Mod = fi.ModTime()
						bts, err := ioutil.ReadAll(f)
						lg(err)
						art.Body = bts
						if len(bts) < 200 {
							// Tiny files may be just a "no redirects" marker — skip those.
							if bytes.Contains(bts, []byte(fetch.MsgNoRdirects)) {
								return
							}
						}
						selecteds = append(selecteds, art)
						useExisting = true
					}
				}
			}
			f()
		}
		if !useExisting {
			nonExisting = append(nonExisting, art)
		}
		if len(selecteds) >= countSimilar {
			break
		}
	}
	lg("============================")
	lg("tried %v links - yielding %v existing similars; not existing in datastore: %v, %v were requested.",
		tried, len(selecteds), len(nonExisting), countSimilar)

	// Second pass: fetch missing documents concurrently if still short.
	if len(selecteds) < countSimilar {
		jobs := make([]distrib.Worker, 0, len(nonExisting))
		for _, art := range nonExisting {
			surl := path.Join(cmd.Host, art.Url)
			wrkr := MyWorker{SURL: surl}
			wrkr.Protocol = knownProtocol
			wrkr.r = r
			wrkr.lg = lg
			wrkr.fs1 = fs1
			job := distrib.Worker(&wrkr)
			jobs = append(jobs, job)
		}

		opt := distrib.NewDefaultOptions()
		opt.TimeOutDur = 3500 * time.Millisecond
		opt.Want = int32(countSimilar - len(selecteds) + 4) // get some more, in case we have "redirected" bodies
		opt.NumWorkers = int(opt.Want)                      // 5s query limit; => hurry; spawn as many as we want
		if numWorkers > 0 {
			opt.NumWorkers = numWorkers
		}
		lg("Preparing %v simultaneous, wanting %v fetches; at %4.2v secs.", opt.NumWorkers, opt.Want, time.Now().Sub(start).Seconds())
		opt.CollectRemainder = false // 5s query limit; => hurry; dont wait for stragglers
		ret, msg := distrib.Distrib(jobs, opt)

		lg("Distrib returned at %4.2v secs with %v results.", time.Now().Sub(start).Seconds(), len(ret))
		lg("\n" + msg.String())

		for _, v := range ret {
			v1, _ := v.Worker.(*MyWorker)
			if v1.FA != nil {
				age := time.Now().Sub(v1.FA.Mod)
				if age.Hours() < 10 {
					lg("\t\tusing fetched file with age %4.2v hrs", age.Hours())
					nonExistFetched = append(nonExistFetched, *v1.FA)
					if len(nonExistFetched) > (countSimilar - len(selecteds)) {
						break
					}
				}
			}
			if v1.err != nil {
				lg(err) // NOTE(review): logs the outer err, not v1.err — looks unintended; verify
			}
		}
		lg("tried %v links - yielding %v fetched - jobs %v", len(nonExisting), len(nonExistFetched), len(jobs))
		selecteds = append(selecteds, nonExistFetched...)

		// Harvest links from the freshly fetched documents as well.
		for _, v := range nonExistFetched {
			// lg("links -> memory dirtree for %q", v.Url)
			addAnchors(lg, cmd.Host, v.Body, dirTree)
		}
	}

	// Persist the link digest if anything new was found very recently.
	if time.Now().Sub(dirTree.LastFound).Seconds() < 10 {
		lg("saving accumulated (new) links to digest")
		saveDigest(lg, fs1, fnDigest, dirTree)
	}

	lg("\t\t%4.2v secs so far 3", time.Now().Sub(start).Seconds())

	// Assemble the JSON response: accumulated log, the source document, and
	// up to countSimilar similar documents (url/mod/body triplets).
	mp := map[string][]byte{}
	mp["msg"] = b.Bytes()
	mp["url_self"] = []byte(condenseTrailingDir(ourl.Path, cmd.CondenseTrailingDirs))
	mp["mod_self"] = []byte(modSrc.Format(http.TimeFormat))
	mp["bod_self"] = btsSrc

	for i, v := range selecteds {
		mp["url__"+spf("%02v", i)] = []byte(v.Url)
		mp["mod__"+spf("%02v", i)] = []byte(v.Mod.Format(http.TimeFormat))
		mp["bod__"+spf("%02v", i)] = v.Body
	}
	mp["lensimilar"] = []byte(spf("%02v", len(selecteds)))

	smp, err := json.MarshalIndent(mp, "", "\t")
	if err != nil {
		lg(b, "marshalling mp to []byte failed\n") // NOTE(review): extra b argument looks wrong for lg's signature — verify
		return
	}

	r.Header.Set("X-Custom-Header-Counter", "nocounter")
	w.Header().Set("Content-Type", "application/json")
	w.Write(smp)

	b.Reset()             // this keeps the buf pointer intact; outgoing defers are still heeded
	b = new(bytes.Buffer) // creates a *new* buf pointer; outgoing defers write into the *old* buf

	lg("\t\t%4.2v secs so far 4 (json resp written as []byte)", time.Now().Sub(start).Seconds())

	return
}
// receiveUpload accepts a multipart file upload and stores it in the
// datastore filesystem under docRootDataStore. A ".zip" upload is
// unpacked and stored as a directory tree instead of a single file.
// Form fields: "filefield" (the file itself), "mountname", "getparam1",
// "description". Memcache is flushed after a successful store.
func receiveUpload(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	lg, _ := loghttp.Logger(w, r)
	c := appengine.NewContext(r)

	// parsing multipart before anything else
	err := r.ParseMultipartForm(1024 * 1024 * 2)
	if err != nil {
		lg("Multipart parsing failed: %v", err)
		return
	}

	wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Receive an Upload"}))
	defer wpf(w, tplx.Foot)
	wpf(w, "<pre>")
	defer wpf(w, "</pre>")

	// Echo the plain form fields for diagnostics.
	fields := []string{"getparam1", "mountname", "description"}
	for _, v := range fields {
		lg("%12v => %q", v, r.FormValue(v))
	}

	mountPoint := dsfs.MountPointLast()
	if len(r.FormValue("mountname")) > 0 {
		mountPoint = r.FormValue("mountname")
	}
	lg("mount point is %v", mountPoint)

	fs1 := dsfs.New(
		dsfs.MountName(mountPoint),
		dsfs.AeContext(c),
	)

	// As closure, since we cannot define dsfs.dsFileSys as parameter
	funcSave := func(argName string, data []byte) (error, *bytes.Buffer) {
		b1 := new(bytes.Buffer)
		fs1 := dsfs.New(
			dsfs.MountName(mountPoint),
			dsfs.AeContext(c),
		)
		dir, bname := fs1.SplitX(argName)
		err := fs1.MkdirAll(dir, 0777)
		wpf(b1, "mkdir %v - %v\n", dir, err)
		if err != nil {
			return err, b1
		}
		err = fs1.WriteFile(path.Join(dir, bname), data, 0777)
		wpf(b1, "saved file content to %v - %v\n", argName, err)
		return err, b1
	}

	ff := "filefield"
	file, handler, err := r.FormFile(ff)
	if err != nil {
		lg("error calling FormFile from %q => %v", ff, err)
		return
	}
	if handler == nil {
		lg("no multipart file %q", ff)
	} else {
		lg("extracted file %v", handler.Filename)
		data, err := ioutil.ReadAll(file)
		if err != nil {
			lg("ReadAll on uploaded file failed: %v", err)
			return
		}
		defer file.Close()
		lg("extracted file content; %v bytes", len(data))

		newFilename := docRootDataStore + handler.Filename
		ext := path.Ext(newFilename)
		if ext == ".zip" {
			// Zip upload: unpack and store every entry.
			lg("found zip - treat as dir-tree %q", newFilename)
			r, err := zip.NewReader(file, int64(len(data)))
			if err != nil {
				lg("open as zip failed: %v", err)
				return
			}
			for _, f := range r.File {
				newFilename = docRootDataStore + f.Name
				dir, bname := fs1.SplitX(newFilename)
				if f.FileInfo().IsDir() {
					lg("\t dir %s", newFilename)
					err := fs1.MkdirAll(path.Join(dir, bname), 0777)
					if err != nil {
						lg("MkdirAll %v failed: %v", newFilename, err)
						return
					}
				} else {
					lg("\t file %s", newFilename)
					rc, err := f.Open()
					if err != nil {
						// NOTE(review): silent return — the open error is never logged; verify intent.
						return
					}
					// NOTE(review): defer inside a loop — all closes run at
					// function return, not per iteration.
					defer func(rc io.ReadCloser) {
						if err := rc.Close(); err != nil {
							panic(err)
						}
					}(rc)
					bts := new(bytes.Buffer)
					size, err := io.Copy(bts, rc)
					if err != nil {
						lg("Could not copy from zipped file %v: %v", newFilename, err)
						return
					}
					err = common.WriteFile(fsi.FileSystem(fs1), path.Join(dir, bname), bts.Bytes())
					// err = fs1.WriteFile(path.Join(dir, bname), bts.Bytes(), 0777)
					if err != nil {
						lg("WriteFile of zipped file %v failed: %v", newFilename, err)
						return
					}
					lg("\t saved %v - %v Bytes", newFilename, size)
				}
			}
		} else {
			// Single-file upload.
			err, b2 := funcSave(newFilename, data)
			lg("%s", b2)
			if err != nil {
				return
			}
		}

		// Invalidate cached filesystem entries after the write.
		errMc := memcache.Flush(appengine.NewContext(r))
		if errMc != nil {
			lg("Error flushing memache: %v", errMc)
			return
		}
		lg("--------------------\n")
	}
}
// dedupHTTP wraps Dedup()
// It fetches and decodes the JSON bundle of similar documents for the
// given URL, runs the dedup algorithm over them, and writes the cleaned
// HTML document to the response.
func dedupHTTP(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,
	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Deduplicating redundant stuff"}))
	defer wpf(b, tplx.Foot)
	wpf(b, "<pre>")
	defer wpf(b, "</pre>")

	err := r.ParseForm()
	lg(err)

	surl := r.FormValue(routes.URLParamKey)
	ourl, err := fetch.URLFromString(surl)
	lg(err)
	if err != nil {
		return
	}
	if ourl.Host == "" {
		lg("host is empty (%v)", surl)
		return
	}

	// Optional protocol override ("prot").
	knownProtocol := ""
	if r.FormValue("prot") != "" {
		knownProtocol = r.FormValue("prot")
	}

	lg("Host %q, Path %q", ourl.Host, ourl.Path)

	fs := GetFS(appengine.NewContext(r), 0)

	least3Files := FetchAndDecodeJSON(r, ourl.String(), knownProtocol, lg, fs)
	lg("Fetched and decoded; found %v", len(least3Files))
	if len(least3Files) > 0 {
		doc := Dedup(ourl, least3Files, lg, fs)

		fNamer := domclean2.FileNamer(logDir, 0)
		fNamer() // first call yields key
		fsPerm := GetFS(appengine.NewContext(r), 0)
		fileDump(lg, fsPerm, doc, fNamer, "_fin.html")

		lg("MapSimiliarCompares: %v SimpleCompares: %v LevenstheinComp: %v\n", breakMapsTooDistinct, appliedLevenshtein, appliedCompare)
		lg("Finish\n")

		var b2 bytes.Buffer
		err := html.Render(&b2, doc)
		lg(err)
		if err != nil {
			return
		}

		// Swap in a fresh buffer so the deferred flush writes nothing and
		// only the rendered document reaches the client.
		b = new(bytes.Buffer)
		// w.Write([]byte("aa"))
		w.Header().Set("Content-type", "text/html; charset=utf-8")
		w.Write(b2.Bytes())
	}
}
// fetchSimForm is the human-facing front end to the fetch-similar service.
// Without a URL parameter it renders an input form; with one it calls the
// fetch-similar endpoint over HTTPS, decodes the JSON reply, and displays
// the message plus per-document summaries.
func fetchSimForm(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,
	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	// on live server => always use https
	if r.URL.Scheme != "https" && !util_appengine.IsLocalEnviron() {
		r.URL.Scheme = "https"
		r.URL.Host = r.Host
		lg("lo - redirect %v", r.URL.String())
		http.Redirect(w, r, r.URL.String(), http.StatusFound)
		// NOTE(review): no return after the redirect — the handler keeps
		// executing and may write more output; verify this is intended.
	}

	err := r.ParseForm()
	lg(err)

	rURL := ""
	if r.FormValue(routes.URLParamKey) != "" {
		rURL = r.FormValue(routes.URLParamKey)
	}

	if len(rURL) == 0 {
		// No URL given: render the input form with an example value.
		wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Find similar HTML URLs"}))
		defer wpf(b, tplx.Foot)

		tm := map[string]string{
			"val":       "www.welt.de/politik/ausland/article146154432/Tuerkische-Bodentruppen-marschieren-im-Nordirak-ein.html",
			"fieldname": routes.URLParamKey,
		}
		tplForm := tt.Must(tt.New("tplName01").Parse(htmlForm))
		tplForm.Execute(b, tm)
	} else {
		// URL given: call the fetch-similar endpoint and render its JSON reply.
		fullURL := fmt.Sprintf("https://%s%s?%s=%s&cnt=%s&prot=%s",
			r.Host, routes.FetchSimilarURI, routes.URLParamKey, rURL,
			r.FormValue("cnt"), r.FormValue("prot"))
		lg("lo - sending to URL 1: %v", fullURL)

		fo := fetch.Options{}
		fo.URL = fullURL
		bts, inf, err := fetch.UrlGetter(r, fo)
		_ = inf
		lg(err)
		if err != nil {
			return
		}
		if len(bts) == 0 {
			lg("empty bts")
			return
		}

		var mp map[string][]byte
		err = json.Unmarshal(bts, &mp)
		lg(err)
		if err != nil {
			lg("%s", bts)
			return
		}

		w.Header().Set("Content-Type", "text/html; charset=utf-8")
		if _, ok := mp["msg"]; ok {
			w.Write(mp["msg"])
		}
		for k, v := range mp {
			if k != "msg" {
				wpf(w, "<br><br>%s:\n", k)
				// Show only the length; the escaped-body branch is disabled.
				if true {
					wpf(w, "len %v", len(v))
				} else {
					wpf(w, "%s", html.EscapeString(string(v)))
				}
			}
		}
	}
}
// We cannot use http.FileServer(http.Dir("./css/")
// to dispatch our dsfs files.
// We need the appengine context to initialize dsfs.
// Thus we have to re-implement a serveFile method:
//
// FsiFileServer serves files from the abstract filesystem in opt.FS under
// the route prefix opt.Prefix. Directories fall back to index.html or a
// listing; ".snappy" files are transparently decompressed; optional
// byte replacements and CUTOUT-section removal are applied before writing.
// Diagnostic output is buffered in b1 and flushed only on error paths.
func FsiFileServer(w http.ResponseWriter, r *http.Request, opt Options) {
	r.Header.Set("X-Custom-Header-Counter", "nocounter")
	lg, b1 := loghttp.BuffLoggerUniversal(w, r)

	fclose := func() {
		// Only upon error.
		// If everything is fine, we reset fclose at the end.
		w.Write(b1.Bytes())
	}
	defer fclose()

	wpf(b1, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Half-Static-File-Server"}))
	wpf(b1, "<pre>")

	err := r.ParseForm()
	if err != nil {
		wpf(b1, "err parsing request (ParseForm)%v", err)
	}

	// Strip the route prefix and any leading slash to get the FS path.
	p := r.URL.Path
	if strings.HasPrefix(p, opt.Prefix) {
		// p = p[len(prefix):]
		p = strings.TrimPrefix(p, opt.Prefix)
	} else {
		wpf(b1, "route must start with prefix %v - but is %v", opt.Prefix, p)
	}
	if strings.HasPrefix(p, "/") {
		p = p[1:]
	}
	wpf(b1, "effective path = %q", p)
	// fullP := path.Join(docRootDataStore, p)
	fullP := p

	f, err := opt.FS.Open(fullP)
	if err != nil {
		wpf(b1, "err opening file %v - %v", fullP, err)
		return
	}
	defer f.Close()

	inf, err := f.Stat()
	if err != nil {
		wpf(b1, "err opening fileinfo %v - %v", fullP, err)
		return
	}

	if inf.IsDir() {
		// Directory: try index.html; otherwise emit a dir listing and stop.
		wpf(b1, "%v is a directory - trying index.html...", fullP)
		fullP += "/index.html"
		fIndex, err := opt.FS.Open(fullP)
		if err == nil {
			defer fIndex.Close()
			inf, err = fIndex.Stat()
			if err != nil {
				wpf(b1, "err opening index fileinfo %v - %v", fullP, err)
				return
			}
			f = fIndex
		} else {
			wpf(b1, "err opening index file %v - %v", fullP, err)
			if r.FormValue("fmt") == "html" {
				dirListHtml(w, r, f)
			} else {
				dirListJson(w, r, f)
			}
			b1 = new(bytes.Buffer) // success => reset the message log => dumps an empty buffer
			return
		}
	}

	wpf(b1, "opened file %v - %v - %v", f.Name(), inf.Size(), err)

	bts1, err := ioutil.ReadAll(f)
	if err != nil {
		wpf(b1, "err with ReadAll %v - %v", fullP, err)
		return
	}

	ext := path.Ext(fullP)
	ext = strings.ToLower(ext)
	if ext == ".snappy" {
		// Transparently decompress and serve under the inner extension.
		btsDec, err := snappy.Decode(nil, bts1)
		if err != nil {
			wpf(b1, "err decoding snappy: "+err.Error())
		} else {
			lg("decoded from %vkB to %vkB", len(bts1)/1024, len(btsDec)/1024)
			bts1 = btsDec
		}
		fullP = strings.TrimSuffix(fullP, path.Ext(fullP))
		ext = path.Ext(fullP)
		ext = strings.ToLower(ext)
		lg("new extension is %v", ext)
	}

	tp := mime.TypeByExtension(ext)
	w.Header().Set("Content-Type", tp)

	//
	// caching
	// either explicitly discourage
	// or explicitly encourage
	if false ||
		ext == ".css" || ext == ".js" ||
		ext == "css" || ext == "js" ||
		ext == ".jpg" || ext == ".gif" ||
		ext == "jpg" || ext == "gif" ||
		false {
		if strings.Contains(fullP, "tamper-monkey") {
			htmlfrag.SetNocacheHeaders(w)
		} else {
			htmlfrag.CacheHeaders(w)
		}
	} else {
		htmlfrag.SetNocacheHeaders(w)
	}

	// Apply configured literal byte replacements.
	for k, v := range opt.Replacements {
		bts1 = bytes.Replace(bts1, []byte(k), v, -1)
	}

	if opt.Cutout {
		// Drop every odd-numbered segment between CUTOUT markers.
		sep := []byte("<span id='CUTOUT'></span>")
		spl := bytes.Split(bts1, sep)
		if len(spl) > 1 {
			bts2 := []byte{}
			for i, part := range spl {
				if i%2 == 0 {
					bts2 = append(bts2, part...)
				}
			}
			bts1 = bts2
		}
	}

	w.Write(bts1)

	b1 = new(bytes.Buffer) // success => reset the message log => dumps an empty buffer
}
// backend renders the admin backend overview page: a long list of links
// to diagnostic and maintenance handlers, the rendered backend UIs of the
// sub-packages, console links, and a few environment/diagnostic footers.
// Non-admin users only get the login message.
func backend(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {
	w.Header().Set("Content-type", "text/html; charset=utf-8")
	w.WriteHeader(http.StatusOK)

	if ok, _, msg := login.CheckForAdminUser(r); !ok {
		w.Write([]byte(msg))
		return
	}

	b1 := new(bytes.Buffer)
	b1.WriteString(tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Backend V1"}))

	htmlfrag.Wb(b1, "Debug pprof", "/debug/pprof")

	htmlfrag.Wb(b1, "Diverse", "nobr")
	htmlfrag.Wb(b1, "Schreib-Methoden", "/write-methods")
	htmlfrag.Wb(b1, "Letzte Email", "/email-view")
	htmlfrag.Wb(b1, "Blob List", "/blob2")
	htmlfrag.Wb(b1, "fetch via proxy", routes.ProxifyURI)
	htmlfrag.Wb(b1, "Instance Info", "/instance-info/view")
	htmlfrag.Wb(b1, "Fulltext put", "/fulltext-search/put")
	htmlfrag.Wb(b1, "Fulltext get", "/fulltext-search/get")
	htmlfrag.Wb(b1, "datastore object view quoted printabe", "/dsu/show")
	htmlfrag.Wb(b1, "Statistics", "/_ah/stats")

	htmlfrag.Wb(b1, "Request Images ", "")
	htmlfrag.Wb(b1, "WrapBlob from Datastore", "/image/img-from-datastore?p=chart1")
	htmlfrag.Wb(b1, "base64 from Datastore", "/image/base64-from-datastore?p=chart1")
	htmlfrag.Wb(b1, "base64 from Variable", "/image/base64-from-var?p=1")
	htmlfrag.Wb(b1, "base64 from File", "/image/base64-from-file?p=static/pberg1.png")
	htmlfrag.Wb(b1, "Drawing a static chart", "/image/draw-lines-example")

	htmlfrag.Wb(b1, "Big Query ...", "")
	htmlfrag.Wb(b1, "Get real data", "/big-query/query-into-datastore")
	htmlfrag.Wb(b1, "Get mocked data", "/big-query/mock-data-into-datastore")
	htmlfrag.Wb(b1, " ... with Chart", "")
	htmlfrag.Wb(b1, "Process Data 1 (mock=1)", "/big-query/regroup-data-01?mock=0")
	htmlfrag.Wb(b1, "Process Data 2", "/big-query/regroup-data-02?f=table")
	htmlfrag.Wb(b1, "Show as Table", "/big-query/show-table")
	htmlfrag.Wb(b1, "Show as Chart", "/big-query/show-chart")
	htmlfrag.Wb(b1, "As HTML", "/big-query/html")

	htmlfrag.Wb(b1, "Namespaces + Task Queues", "")
	htmlfrag.Wb(b1, "Increment", "/namespaced-counters/increment")
	htmlfrag.Wb(b1, "Read", "/namespaced-counters/read")
	htmlfrag.Wb(b1, "Push to task-queue", "/namespaced-counters/queue-push")

	htmlfrag.Wb(b1, "URLs with/without ancestors", "nobr")
	htmlfrag.Wb(b1, "Backend", "/save-url/backend")

	htmlfrag.Wb(b1, "Guest Book", "")
	htmlfrag.Wb(b1, "Eintrag hinzufügen", "/guest-entry")
	htmlfrag.Wb(b1, "Einträge auflisten", "/guest-view")
	htmlfrag.Wb(b1, "Einträge auflisten - paged - serialized cursor", "/guest-view-cursor")

	// Rendered backend UIs of the sub-packages.
	b1.WriteString("<hr>\n")
	uiDsFs := webapi.BackendUIRendered()
	b1.Write(uiDsFs.Bytes())
	b1.WriteString("<hr>\n")
	b1.Write(upload.BackendUIRendered().Bytes())
	b1.Write(repo.BackendUIRendered().Bytes())
	b1.Write(dedup.BackendUIRendered().Bytes())
	b1.Write(coinbase.BackendUIRendered().Bytes())
	b1.Write(tplx.BackendUIRendered().Bytes())
	b1.Write(login.BackendUIRendered().Bytes())
	b1.WriteString("<br>\n")
	b1.WriteString("<hr>\n")

	// Console / admin links (local dev console, cloud console, legacy UI).
	urlLocalAdmin := fmt.Sprintf("http://localhost:%v/mail", routes.DevAdminPort())
	ancLocalAdmin := fmt.Sprintf(" <a target='_gae' href='%v' >local app console</a><br>\n", urlLocalAdmin)
	b1.WriteString(ancLocalAdmin)

	urlConsole := fmt.Sprintf("https://console.developers.google.com/project/%v", routes.AppID())
	ancConsole := fmt.Sprintf("<a target='_gae' href='%v' ><b>global</b> developer console</a>\n", urlConsole)
	b1.WriteString(ancConsole)

	urlOldAdmin := fmt.Sprintf("https://appengine.google.com/settings?&app_id=s~%v", routes.AppID())
	ancOldAdmin := fmt.Sprintf(" <a target='_gae' href='%v' >old admin UI</a><br>\n ", urlOldAdmin)
	b1.WriteString(ancOldAdmin)

	b1.WriteString(` <a target='_gae' href='http://go-lint.appspot.com/github.com/pbberlin/tools/dsu' >lint a package</a><br>`)

	// Diagnostics footer: request routing info and utility smoke tests.
	dir := m["dir"].(string)
	base := m["base"].(string)
	b1.WriteString("<br>\n")
	b1.WriteString("Dir: --" + dir + "-- Base: --" + base + "-- <br>\n")

	b1.WriteString("<br>\n")
	s := fmt.Sprintf("IntegerSequenes a, b: %v %v %v<br>\n", util.MyIntSeq01(), util.MyIntSeq01(), util.MyIntSeq02())
	b1.WriteString(s)

	// b1.WriteString("<br>\n")
	// b1.WriteString(fmt.Sprintf("Temp dir is %s<br>\n", os.TempDir()))

	b1.WriteString("<br>\n")
	io.WriteString(b1, "Date: "+util.TimeMarker()+" - ")
	b1.WriteString(fmt.Sprintf("Last Month %q - 24 Months ago is %q<br>\n", util.MonthsBack(0), util.MonthsBack(24)))

	b1.WriteString("<br>\n")
	x1 := " z" + stringspb.IncrementString("--z")
	x2 := " Z" + stringspb.IncrementString("--Z")
	x3 := " 9" + stringspb.IncrementString("--9")
	x4 := stringspb.IncrementString(" --Peter")
	sEnc := "Łódź < " + stringspb.IncrementString("Łódź") + x1 + x2 + x3 + x4
	b1.WriteString(fmt.Sprint(string([]byte(sEnc)), "<br>"))

	b1.WriteString(tplx.Foot)
	w.Write(b1.Bytes())
}