func ListEntries(w http.ResponseWriter, r *http.Request) (gbEntries []GbEntryRetr, report string) { c := appengine.NewContext(r) /* High Replication Datastore: Ancestor queries are strongly consistent. Queries spanning MULTIPLE entity groups are EVENTUALLY consistent. If .Ancestor was omitted from this query, there would be slight chance that recent GB entry would not show up in a query. */ q := ds.NewQuery(GbEntryKind).Ancestor(keyParent(c)).Order("-Date").Limit(10) gbEntries = make([]GbEntryRetr, 0, 10) keys, err := q.GetAll(c, &gbEntries) if fmt.Sprintf("%T", err) == fmt.Sprintf("%T", new(ds.ErrFieldMismatch)) { //s := fmt.Sprintf("%v %T vs %v %T <br>\n",err,err,ds.ErrFieldMismatch{},ds.ErrFieldMismatch{}) loghttp.E(w, r, err, true) err = nil // ignore this one - it's caused by our deliberate differences between gbsaveEntry and gbEntrieRetr } loghttp.E(w, r, err, false) // for investigative purposes, // we var b1 bytes.Buffer var sw string var descrip []string = []string{"class", "path", "key_int_guestbk"} for i0, v0 := range keys { sKey := fmt.Sprintf("%v", v0) v1 := strings.Split(sKey, ",") sw = fmt.Sprintf("key %v", i0) b1.WriteString(sw) for i2, v2 := range v1 { d := descrip[i2] sw = fmt.Sprintf(" \t %v: %q ", d, v2) b1.WriteString(sw) } b1.WriteString("\n") } report = b1.String() for _, gbe := range gbEntries { s := gbe.Comment1 if len(s) > 0 { if pos := strings.Index(s, "0300"); pos > 1 { i1 := util.Max(pos-4, 0) i2 := util.Min(pos+24, len(s)) s1 := s[i1:i2] s1 = strings.Replace(s1, "3", "E", -1) report = fmt.Sprintf("%v -%v", report, s1) } } } return }
// DrawLine only writes to the slot label. // The pointer to the amorph remains unchanged // Empty() still reports true for the slot func (m *TransposableMatrix) DrawLine(line []Point, suffix string, pivotPointsOnly bool) { for i := 0; i < len(line); i++ { s := spf("%v%v", i%10, suffix) if suffix == "" { s = "" } if pivotPointsOnly || i == len(line)-1 { m.SetLabel(line[i].x, line[i].y, Slot{Label: s}) } else { x := util.Min(line[i+1].x, line[i].x) y := util.Min(line[i+1].y, line[i].y) dx := util.Abs(line[i+1].x - line[i].x) dy := util.Abs(line[i+1].y - line[i].y) // pf("sect :%v %v %v %v \n", x, x+dx, y, y+dy) for j := x; j <= x+dx; j++ { for k := y; k <= y+dy; k++ { m.SetLabel(j, k, Slot{Label: s}) } } } } }
// blendPixelOverPixel blends a new pixel channel value over an old one,
// heeding the new pixel's alpha value al_new (expected in [0..1]).
//
// algorithm NOT according to http://en.wikipedia.org/wiki/Alpha_compositing
// but by my own trial and error
func blendPixelOverPixel(ic_old, ic_new uint8, al_new float64) (c_res uint8) {
	c_old := float64(ic_old)
	c_new := float64(ic_new)

	// Linear interpolation between old and new channel values.
	blended := c_old*(1-al_new) + c_new*al_new

	// Round half up; both channel values are non-negative, so adding 0.5
	// before truncation is a plain round.
	rounded := blended + 0.5
	if rounded > 255 { // clamp, in case al_new lies outside [0..1]
		rounded = 255
	}
	c_res = uint8(rounded)
	//log.Printf("\t\t %3.1f + %3.1f = %3.1f", c_old*(1-al_new),c_new*al_new, blended)
	return
}
// split divides an over-full node into two halves around the median key,
// shrinking nd to the lower half and returning a new red node holding the
// upper half.
//
// NOTE(review): the kv1/kv2 partition uses m.less while min/max bounds come
// from sort.Strings ordering — these must agree (i.e. m.less should be plain
// lexicographic order); confirm against the OSMap constructor.
func (m *OSMap) split(nd *node, lvl int) *node {
	// Sort all keys of the node to locate the median split key.
	sortedK := make([]string, len(nd.kv))
	for i := 0; i < len(nd.kv); i++ {
		sortedK[i] = nd.kv[i].key
	}
	sort.Strings(sortedK)
	halfIdx := len(nd.kv) / 2
	splitkey := sortedK[halfIdx]

	// Statistics: count splits of this node per level.
	ndStat[lvl][nd]++

	// Debug aid for selected levels: min < split < max, truncated to 3 chars.
	if lvl == 4 || lvl == 6 {
		kd := make([]string, 3)
		kd[0], kd[1], kd[2] = sortedK[0], splitkey, sortedK[len(sortedK)-1]
		for i := 0; i < len(kd); i++ {
			kd[i] = kd[i][:util.Min(len(kd[i]), 3)]
		}
		// fmt.Printf("splitting l%2v ac%3v %4q < %4q < %4q \n", lvl, m.allocCntr, kd[0], kd[1], kd[2])
	}

	// Take two pre-allocated kv slices from the reservoir instead of
	// allocating fresh ones; m.allocCntr tracks reservoir consumption.
	// kv1 := make([]kvt, 0, cFanout)
	// kv2 := make([]kvt, 0, cFanout)
	kv1 := m.reservoir[m.allocCntr]
	m.allocCntr++
	kv2 := m.reservoir[m.allocCntr]
	m.allocCntr++

	// Partition: entries below splitkey stay in nd (kv1),
	// the rest move to the new node (kv2).
	for i := 0; i < len(nd.kv); i++ {
		if m.less(nd.kv[i].key, splitkey) {
			kv1 = append(kv1, nd.kv[i])
		} else {
			kv2 = append(kv2, nd.kv[i])
		}
	}

	// Shrink the existing node to the lower half ...
	nd.min = sortedK[0]
	nd.max = sortedK[halfIdx-1]
	nd.kv = kv1

	// ... and hand back a fresh red node covering the upper half.
	newNd := node{}
	newNd.red = true
	newNd.min = splitkey
	newNd.max = sortedK[len(sortedK)-1]
	newNd.kv = kv2
	return &newNd
}
func Fetch(amount int) { go func() { for { pfa := <-c fa := *pfa fullArticles = append(fullArticles, fa) pf("done fetching %v \n", fa.URL[27:]) } }() // cx := appengine.NewContext(r) // cl := urlfetch.Client(cx) cl := http.DefaultClient resp, err := cl.Get("http://www.handelsblatt.com/contentexport/feed/schlagzeilen") if err != nil { pf("%v\n", err) } bcntent, err := ioutil.ReadAll(resp.Body) defer resp.Body.Close() if err != nil { pf("%v\n", err) } bcntent = bytes.Replace(bcntent, []byte("content:encoded>"), []byte("content-encoded>S"), -1) // scntent := string(bcntent) // pf("size: %v \n%v\n", len(scntent), util.Ellipsoider(scntent, 1450)) var rssDoc RSS err = xml.Unmarshal(bcntent, &rssDoc) if err != nil { pf("%v\n", err) } ps := util.IndentedDump(rssDoc) s := *ps pf("- %v - \n%v\n", len(s), s[:util.Min(1600, len(s)-1)]) items := rssDoc.Items for i := 0; i < len(items.ItemList); i++ { lpItem := items.ItemList[i] pf("%v: %v - %v\n", i, lpItem.Published[5:22], lpItem.Link) go func(argURL string) { cl := http.DefaultClient resp, err := cl.Get(argURL) if err != nil { pf(" full art %v %v\n", argURL, err) } bcntent, err := ioutil.ReadAll(resp.Body) defer resp.Body.Close() if err != nil { pf(" full art %v %v\n", argURL, err) } fa := FullArticle{} fa.URL = argURL fa.Body = &bcntent c <- &fa }(lpItem.Link) if i+1 >= amount { break } } time.Sleep(4 * time.Second) pf("\n\n\n") for i := 0; i < len(fullArticles); i++ { lpFa := fullArticles[i] indir := *fullArticles[i].Body // pf("%v: %v\n\n", lpFa.URL[27:], util.Ellipsoider(string(indir), 200)) fileName := lpFa.URL fileName = strings.Replace(fileName, "https://", "", -1) fileName = strings.Replace(fileName, "http://", "", -1) pf("%v\n", fileName) fileName = fileName[strings.Index(fileName, "/")+1:] fileName = strings.Replace(fileName, "/", "--", 1) pf("%v\n", fileName) nextSlash := strings.Index(fileName, "/") if nextSlash > 0 { fileName = fileName[:strings.Index(fileName, "/")] fileName += ".html" } pf("%v\n", fileName) f, 
err := os.Create(fileName) if err != nil { pf(" file open %v %v\n", fileName, err) } defer f.Close() n2, err := f.Write(indir) pf("wrote %d bytes - err |%v| \n", n2, err) } }
func handleFetchURL(w http.ResponseWriter, r *http.Request) { c := appengine.NewContext(r) if r.URL.Scheme != "https" && !util_appengine.IsLocalEnviron() { r.URL.Scheme = "https" r.URL.Host = r.Host http.Redirect(w, r, r.URL.String(), http.StatusFound) } rURL := "" urlAsPost := "" /* To distinguish between posted and getted value, we check the "post-only" slice of values first. If nothing's there, but FormValue *has* a value, then it was "getted", otherwise "posted" */ if r.PostFormValue("url") != "" { urlAsPost = "url posted" rURL = r.PostFormValue("url") } if r.FormValue("url") != "" { if rURL == "" { urlAsPost = "url getted" rURL = r.FormValue("url") } } renderInPre := false if len(r.FormValue("renderInPre")) > 0 { renderInPre = true } elipseOutput := r.FormValue("elipseOutput") var msg, cntnt string if len(rURL) == 0 { tplAdder, tplExec := tpl_html.FuncTplBuilder(w, r) tplAdder("n_html_title", "Fetch some http data", nil) m := map[string]string{"protocol": "https", "host": r.Host, "path": fetchURL, "val": "google.com"} if util_appengine.IsLocalEnviron() { m["protocol"] = "http" } tplAdder("n_cont_0", c_formFetchURL, m) tplExec(w, r) } else { w.Header().Set("Content-type", "text/html; charset=utf-8") // w.Header().Set("Content-type", "text/html; charset=latin-1") if !strings.HasPrefix(rURL, "http://") && !strings.HasPrefix(rURL, "https://") { rURL = "https://" + rURL } u, err := url.Parse(rURL) if err != nil { panic(err) } host, port, err = net.SplitHostPort(u.Host) if err != nil { host = u.Host } log.Println("host and port: ", host, port, "of", rURL, "standalone:", u.Host) client := urlfetch.Client(c) resp, err := client.Get(rURL) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } if resp.StatusCode != http.StatusOK { fmt.Fprintf(w, "HTTP GET returned status %v<br>\n\n", resp.Status) return } defer resp.Body.Close() byteContent, err := ioutil.ReadAll(resp.Body) if err != nil { c.Errorf("%s", err) fmt.Fprintf(w, "Error 
%v<br>\n\n", err.Error()) return } else { msg = fmt.Sprintf("%v bytes read<br>", len(byteContent)) if urlAsPost != "" { msg += fmt.Sprintf("get overwritten by post: %v <br>", urlAsPost) } cntnt = string(byteContent) } cntnt = insertNewlines.Replace(cntnt) cntnt = undouble.Replace(cntnt) if len(elipseOutput) > 0 { cutoff := util.Min(100, len(cntnt)) fmt.Fprintf(w, "content is: <pre>"+cntnt[:cutoff]+" ... "+cntnt[len(cntnt)-cutoff:]+"</pre>") } else { if renderInPre { fmt.Fprintf(w, "content is: <pre>"+cntnt+"</pre>") } else { cntnt = ModifyHTML(r, cntnt) fmt.Fprintf(w, cntnt) } } } // cntnt = html.EscapeString(cntnt) fmt.Fprintf(w, " %s \n\n", msg) }
// similarTextifiedTrees2 compares src against every textified tree in mp
// (skipping src's own source document), appends sufficiently comparable
// candidates to src.Similars, and accumulates absolute/relative
// Levenshtein distance sums on src.
//
// NOTE(review): the skipPrefix parameter is not used in this body —
// confirm whether it is vestigial or should filter fnKey.
func similarTextifiedTrees2(src *TextifiedTree, mp map[string][]*TextifiedTree, skipPrefix map[string]bool) {
	// srcE := word.WrapAsEqualer(string(src.Text), true) // ssrc as Equaler
	srcE := wordb.WrapAsEqualer(src.Text, true)
	srcLen := float64(len(src.Text))

	for fnKey, tts := range mp {
		// Never compare a document against itself.
		if fnKey == src.SourceID {
			pf("  to %v SKIP self\n", fnKey)
			continue
		}
		pf("  to %v\n", fnKey)
		// cntr counts matches printed; br tracks whether we are at a
		// fresh output line (controls tab/newline formatting below).
		cntr, br := 0, true
		for _, tt := range tts {
			// outl, text := tt.Outl, tt.Text

			// tts is sorted by level, so once tt.Lvl exceeds the tolerance
			// window every later entry does too — safe to break.
			if tt.Lvl > src.Lvl+levelsTolerance {
				break
			}
			// Keep only candidates at src's level or within the tolerance
			// band above it.
			if tt.Lvl == src.Lvl || (tt.Lvl > src.Lvl && tt.Lvl <= src.Lvl+levelsTolerance) {
				// proceed
			} else {
				continue
			}

			// Cheap pre-filters before the expensive Levenshtein:
			// empty source, gross token-count mismatch, histogram distance.
			if src.NumTokens < 1 {
				continue
			}
			if src.NumTokens < 5 && tt.NumTokens > 7 {
				continue
			}
			if HistoBasedDistance(src, tt) > 0.51 {
				breakMapsTooDistinct++
				continue
			}
			// Size ratio filter: skip if either text is >3x the other.
			relSize := srcLen / float64(util.Max(1, len(tt.Text)))
			if relSize < 0.33 || relSize > 3 {
				continue
			}

			absDist, relDist := 0, 0.0
			if tt.NumTokens == src.NumTokens &&
				len(tt.Text) == len(src.Text) &&
				bytes.Equal(tt.Text, src.Text) {
				// Byte-identical texts: distance is zero, skip Levenshtein.
				absDist, relDist = 0, 0.0
				appliedCompare++
			} else {
				dstE := wordb.WrapAsEqualer(tt.Text, true) // destinations as Equaler
				m := levenshtein.New(srcE, dstE, opt)
				absDist, relDist = m.Distance()
				appliedLevenshtein++
			}

			// if relDist < 0.26 && absDist < 10 {
			if br {
				pf("\t")
			}
			// Print a fixed-width excerpt of the candidate text.
			sd := ""
			sd = string(tt.Text[:util.Min(2*excerptLen, len(tt.Text)-1)])
			sd = stringspb.ToLen(sd, 2*excerptLen+1)
			pf("%12v %v %4v %5.2v  ", tt.Outline, sd, absDist, relDist)
			cntr++
			br = false

			// Record the similar candidate on src.
			sim := Similar{}
			sim.SourceID = fnKey
			sim.Lvl = tt.Lvl
			sim.Outline = tt.Outline
			sim.AbsLevenshtein = absDist
			sim.RelLevenshtein = relDist
			sim.Text = tt.Text
			src.Similars = append(src.Similars, sim)
			src.SumAbsLevenshtein += absDist
			src.SumRelLevenshtein += relDist

			// Two columns per output line; hard stop after 20 matches.
			if cntr%2 == 0 || cntr > 20 {
				pf("\n")
				br = true
			}
			if cntr > 20 {
				break
			}
		}
		if !br {
			pf("\n")
		}
	}
}