Example #1
// we want to extract the 'image/png' from
// 	"data:image/png;base64,..."
//func MimeFromBase64(b *bytes.Buffer)(mime string){
func MimeFromBase64(b io.Reader) (mime string) {

	lg, bfLog := loghttp.BuffLoggerUniversal(nil, nil)
	_ = bfLog // renamed so the io.Reader parameter b is not overwritten by the log buffer

	// to avoid huge string allocation
	//   we read the first 100 bytes
	b1 := make([]byte, 100)
	_, err := b.Read(b1)
	lg(err)

	s := string(b1)

	pos := base64HeaderPosition(s)

	if pos > 0 {
		tmp1 := s[:pos]
		tmp2 := strings.Split(tmp1, ";")
		tmp3 := strings.Split(tmp2[0], ":")
		mime = tmp3[1]
	}

	return

}
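A minimal stand-alone sketch of the same header parsing, using only the standard library; the helper name and the sample data URI below are made up for illustration:

package main

import (
	"fmt"
	"strings"
)

// mimeFromDataURI returns the media type of a "data:<mime>;base64,..." string,
// or "" when the prefix does not look like a base64 data URI.
func mimeFromDataURI(s string) string {
	if !strings.HasPrefix(s, "data:") {
		return ""
	}
	pos := strings.Index(s, ";base64,")
	if pos < 0 {
		return ""
	}
	return s[len("data:"):pos]
}

func main() {
	fmt.Println(mimeFromDataURI("data:image/png;base64,iVBORw0KGgo=")) // image/png
}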
Example #2
func rssDoc2DirTree(w http.ResponseWriter, r *http.Request, treeX *DirTree, rssDoc RSS, domain string) {

	lg, lge := loghttp.Logger(w, r)
	_ = lg

	if treeX == nil {
		treeX = &DirTree{Name: "root1", Dirs: map[string]DirTree{}, LastFound: time.Now().Truncate(time.Minute)}
	}

	articleList := []FullArticle{}

	for _, lpItem := range rssDoc.Items.ItemList {

		t, err := time.Parse(time.RFC1123Z, lpItem.Published)
		//     := time.Parse("Mon, 2 Jan 2006 15:04:05 -0700", lpItem.Published)
		lge(err)

		articleList = append(articleList, FullArticle{Url: lpItem.Link, Mod: t})

	}

	lg1, _ := loghttp.BuffLoggerUniversal(w, r)

	path2DirTree(lg1, treeX, articleList, domain, true)

}
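The pubDate parsing above hinges on the time.RFC1123Z layout; a tiny self-contained sketch with a made-up date string:

package main

import (
	"fmt"
	"time"
)

func main() {
	// RFC1123Z is the layout commonly used in RSS <pubDate> elements.
	t, err := time.Parse(time.RFC1123Z, "Mon, 02 Jan 2006 15:04:05 -0700")
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println(t.UTC()) // 2006-01-02 22:04:05 +0000 UTC
}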
Example #3
func GetHomeTpl(w http.ResponseWriter, r *http.Request, title, body string) *template.Template {

	if body == "" {
		body = IDCardHTML + UserInfoHTML
	}

	lg, _ := loghttp.BuffLoggerUniversal(w, r)

	bstpl := tplx.TemplateFromHugoPage(w, r)

	b := new(bytes.Buffer)

	fmt.Fprintf(b, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		// "HtmlTitle":       "{{ .HtmlTitle }}", // this seems to cause problems sometimes.
		"HtmlTitle":       title,
		"HtmlDescription": "", // reminder
		"HtmlHeaders":     template.HTML(Headers),
		"HtmlContent":     template.HTML(body),
	}))

	intHomeTemplate, err := template.New("home").Parse(b.String())
	lg(err)

	return intHomeTemplate

}
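GetHomeTpl renders a page shell into a buffer and then re-parses that buffer as a fresh template; a self-contained sketch of the two-stage idea, with a made-up shell standing in for tplx.TemplateFromHugoPage:

package main

import (
	"bytes"
	"fmt"
	"html/template"
	"os"
)

func main() {
	// Stage 1: execute an outer shell; the inner action survives as literal text.
	shell := template.Must(template.New("shell").Parse(
		"<html><head><title>{{.Title}}</title></head><body>{{.Body}}</body></html>"))

	b := new(bytes.Buffer)
	shell.Execute(b, map[string]interface{}{
		"Title": "Member Area",
		"Body":  template.HTML("<p>Hello {{.Name}}</p>"), // kept verbatim for stage 2
	})

	// Stage 2: parse the rendered shell as a new template and execute it per request.
	home, err := template.New("home").Parse(b.String())
	if err != nil {
		fmt.Println(err)
		return
	}
	home.Execute(os.Stdout, map[string]string{"Name": "Carl"})
}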
Example #4
func queuePush(w http.ResponseWriter, r *http.Request, mx map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	_ = b

	c := appengine.NewContext(r)

	m := map[string][]string{"counter_name": []string{nscStringKey}}
	t := taskqueue.NewPOSTTask("/_ah/namespaced-counters/queue-pop", m)

	taskqueue.Add(c, t, "")

	c, err := appengine.Namespace(c, altNamespace)
	lg(err)
	taskqueue.Add(c, t, "")

	io.WriteString(w, "tasks enqueued\n")

	io.WriteString(w, "\ncounter values now: \n")
	readBothNamespaces(w, r, mx)

	io.WriteString(w, "\n\n...sleeping... \n")
	time.Sleep(time.Duration(400) * time.Millisecond)
	readBothNamespaces(w, r, mx)

}
Example #5
// unused
// probably  efficient enough just to call
// var bEnc []byte = []byte(sEnc)
func StringToVByte(s string) (*bytes.Buffer, *bytes.Buffer) {

	lg, b := loghttp.BuffLoggerUniversal(nil, nil)
	_ = b

	bMsg := new(bytes.Buffer)

	bDst := new(bytes.Buffer)

	const chunksize = 20
	lb := make([]byte, chunksize) // loop buffer
	rdr := strings.NewReader(s)
	for {
		n1, err := rdr.Read(lb)
		if err == io.EOF {
			break
		}
		lg(err)
		if n1 < 1 {
			break
		}

		independentCopy := make([]byte, n1)
		copy(independentCopy, lb)
		n2, err := bDst.Write(independentCopy)
		lg(err)

		bMsg.WriteString(fmt.Sprintln("reading", n1, "bytes - writing", n2, "bytes: \n"))
		bMsg.WriteString(fmt.Sprint(" --", string(independentCopy), "--<br>\n"))
	}

	return bDst, bMsg

}
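The loop above copies every chunk before keeping it, because the read buffer is reused on each iteration; a compact sketch of that pattern against an arbitrary io.Reader:

package main

import (
	"fmt"
	"io"
	"strings"
)

// readChunks drains r in fixed-size chunks, copying each one so the
// reused read buffer cannot overwrite earlier chunks.
func readChunks(r io.Reader, chunkSize int) [][]byte {
	var chunks [][]byte
	buf := make([]byte, chunkSize)
	for {
		n, err := r.Read(buf)
		if n > 0 {
			c := make([]byte, n)
			copy(c, buf[:n])
			chunks = append(chunks, c)
		}
		if err != nil { // io.EOF or a real error ends the loop either way
			break
		}
	}
	return chunks
}

func main() {
	for _, c := range readChunks(strings.NewReader("abcdefghij"), 4) {
		fmt.Printf("%q\n", c) // "abcd", "efgh", "ij"
	}
}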
Example #6
func readBothNamespaces(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	_ = b

	var c1, c2 int64
	var s1, s2 string
	var err error
	var reset bool = false

	p := r.FormValue("reset")
	if p != "" {
		reset = true
	}

	c := appengine.NewContext(r)
	c1, err = agnosticReadReset(c, reset)
	lg(err)

	{
		c, err = appengine.Namespace(c, altNamespace)
		lg(err)
		c2, err = agnosticReadReset(c, reset)
		lg(err)
	}

	s1 = fmt.Sprintf("%v", c1)
	s2 = fmt.Sprintf("%v", c2)

	io.WriteString(w, "|"+s1+"|    |"+s2+"|")
	if reset {
		io.WriteString(w, "     and reset")
	}

}
Example #7
func searchPut(w http.ResponseWriter, r *http.Request) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	_ = b

	id := "PA6-5001"
	user := &User{
		Customer:  "Carl Corral",
		Comment:   "I am <em>riled up</em> text",
		Visits:    1,
		LastVisit: time.Now(),
		Birthday:  time.Date(1968, time.May, 19, 0, 0, 0, 0, time.UTC),
	}

	c := appengine.NewContext(r)

	index, err := search.Open("users")
	lg(err)

	ret_id, err := index.Put(c, id, user)
	lg(err)

	fmt.Fprint(w, "OK, saved "+ret_id+"\n\n")

	var u2 User
	err = index.Get(c, ret_id, &u2)
	lg(err)
	fmt.Fprint(w, "Retrieved document: ", u2)

}
Example #8
/*
https://tec-news.appspot.com/coinbase-integr/redir-success1?
order[button][description]=When and how Bitcoin decline will start.&
order[button][id]=0025d69ea925b48ba2b7adeb2a911ca2&
order[button][name]=Bitcoin Analysis&
order[button][repeat]=&
order[button][resource_path]=/v2/checkouts/4f1e5ecc-c8fc-56fc-926c-15a7eebd8314&
order[button][subscription]=false&
order[button][type]=buy_now&
order[button][uuid]=4f1e5ecc-c8fc-56fc-926c-15a7eebd8314&
order[created_at]=2015-10-26 08:03:17 -0700&
order[custom]=productID=/member/tec-news/crypto-experts-neglect-one-vital-aspect&uID=14952300052240127534&
order[event]=&
order[id]=GAB5VN36&
order[metadata]=&
order[receive_address]=myL84ofiymQpzzmJ7Foc9F2wQ4GMuSuQ3f&
order[refund_address]=mwaz3wxMbnZrBZUSZpVHr51xjQ6Swx756b&
order[resource_path]=/v2/orders/9bbf6fde-530a-53a4-bf94-d54fc3f43d40&
order[status]=completed&
order[total_btc][cents]=5600.0&
order[total_btc][currency_iso]=BTC&
order[total_native][cents]=50.0&
order[total_native][currency_iso]=EUR&
order[total_payout][cents]=0.0&
order[total_payout][currency_iso]=USD&
order[transaction][confirmations]=0&
order[transaction][hash]=ada26d75ff1e16b4febf539433d5260441171560c57adfff2ac968be37108112&
order[transaction][id]=562e40dede472f26be000018&
order[uuid]=9bbf6fde-530a-53a4-bf94-d54fc3f43d40
*/
func paymentSuccess(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	r.Header.Set("X-Custom-Header-Counter", "nocounter")
	lg, _ := loghttp.BuffLoggerUniversal(w, r)

	err := r.ParseForm()
	if err != nil {
		lg(err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	custom := r.Form.Get("order[custom]")
	// w.Write([]byte("custom=" + custom + "<br>\n"))

	values, err := url.ParseQuery(custom)
	if err != nil {
		lg(err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	productID := values.Get("productID")
	uID := values.Get("uID")

	if productID != "" {
		lg("about to redirect to %v", productID)
		http.Redirect(w, r, productID+"?redirected-from=paymentsucc", http.StatusFound)
		return
	}

	w.Write([]byte("productID=" + productID + " uID=" + uID + "<br>\n"))

}
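The notable detail above is that order[custom] itself carries another encoded query string; a sketch of the double ParseQuery step with shortened, made-up values:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Outer callback query as it would arrive in r.Form (values shortened).
	outer, _ := url.ParseQuery("order%5Bcustom%5D=productID%3D%2Fmember%2Fabc%26uID%3D123")

	// The custom field is itself an encoded query string.
	custom := outer.Get("order[custom]")
	inner, err := url.ParseQuery(custom)
	if err != nil {
		fmt.Println("inner parse failed:", err)
		return
	}
	fmt.Println(inner.Get("productID"), inner.Get("uID")) // /member/abc 123
}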
Example #9
// Dom2File writes DOM to file
func Dom2File(fn string, node *html.Node) {
	lg, _ := loghttp.BuffLoggerUniversal(nil, nil)

	var b bytes.Buffer
	err := html.Render(&b, node)
	lg(err)
	Bytes2File(fn, b.Bytes())
}
Example #10
func queuePop(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	_ = b

	c := appengine.NewContext(r)
	err := agnosticIncrement(c)
	c.Infof("qp")
	lg(err)
}
Example #11
func Base64_str_to_img(base64_img string) (img image.Image, format string) {

	lg, b := loghttp.BuffLoggerUniversal(nil, nil)
	_ = b

	pos := base64HeaderPosition(base64_img)

	reader := base64.NewDecoder(base64.StdEncoding, strings.NewReader(base64_img[pos:]))
	img, format, err := image.Decode(reader)
	lg(err)
	return
}
Example #12
func deleteAll(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, _ := loghttp.BuffLoggerUniversal(w, r)

	err := r.ParseForm()
	lg(err)

	wpf(w, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Delete all filesystem data"}))
	defer wpf(w, tplx.Foot)

	confirm := r.FormValue("confirm")
	if confirm != "yes" {
		wpf(w, "All dsfs contents are deletes. All memfs contents are deleted<br>\n")
		wpf(w, "Put a get param into the URL ?confirm - and set it to 'yes'<br>\n")
		wpf(w, "Put a get param 'mountname' into url; i.e. mountname=mntftch<br>\n")
		return
	}

	wpf(w, "<pre>\n")
	defer wpf(w, "\n</pre>")

	//
	//
	fs := dsfs.New(
		dsfs.AeContext(appengine.NewContext(r)),
	)

	mountName := r.FormValue("mountname")
	if mountName != "" {
		wpf(w, "mountame = "+mountName+"\n")
		fs = dsfs.New(
			dsfs.AeContext(appengine.NewContext(r)),
			dsfs.MountName(mountName),
		)
	}

	wpf(w, "dsfs:\n")
	msg, err := fs.DeleteAll()
	if err != nil {
		wpf(w, "err during delete %v\n", err)
	}
	wpf(w, msg)

	memMapFileSys = memfs.New()
	wpf(w, "\n")
	wpf(w, "memMapFs new")

	// cleanup must be manual
	osFileSys = osfs.New()

}
Example #13
// Bytes2File writes bytes; creates the path if necessary
// and logs any errors, even to the appengine log
func Bytes2File(fn string, b []byte) {
	lg, _ := loghttp.BuffLoggerUniversal(nil, nil)

	var err error
	err = ioutil.WriteFile(fn, b, 0)
	if err != nil {

		err = os.MkdirAll(filepath.Dir(fn), os.ModePerm)
		lg(err, "directory creation failed: %v")

		err = ioutil.WriteFile(fn, b, 0)
		lg(err)

	}
}
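A stand-alone sketch of the same write-then-create-directories fallback; it uses os.WriteFile with an explicit mode, since the 0 mode in the example would leave a newly created file without any permission bits:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// writeFileMkdir writes b to fn, creating missing parent directories on demand.
func writeFileMkdir(fn string, b []byte) error {
	err := os.WriteFile(fn, b, 0644)
	if err == nil {
		return nil
	}
	if err := os.MkdirAll(filepath.Dir(fn), 0755); err != nil {
		return fmt.Errorf("directory creation failed: %w", err)
	}
	return os.WriteFile(fn, b, 0644)
}

func main() {
	if err := writeFileMkdir(filepath.Join(os.TempDir(), "demo", "a.txt"), []byte("hi")); err != nil {
		fmt.Println(err)
	}
}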
Example #14
// based on bytes.Buffer and Writing into it
func VVByte_to_string(m [][]byte) (*bytes.Buffer, *bytes.Buffer) {

	lg, b := loghttp.BuffLoggerUniversal(nil, nil)
	_ = b

	bRet := new(bytes.Buffer)
	bMsg := new(bytes.Buffer)

	//for i,v := range m {
	for i := 0; i < len(m); i++ {
		n, err := bRet.Write(m[i])
		lg(err)
		bMsg.WriteString(" lp" + util.Itos(i) + ": writing " + util.Itos(n) + " bytes: \n")
	}
	return bRet, bMsg
}
Example #15
func splitPort(hp string) string {

	lg, b := loghttp.BuffLoggerUniversal(nil, nil)
	_ = b

	host, port, err := net.SplitHostPort(hp)
	_ = port

	if err != nil {
		if strings.Contains(err.Error(), "missing port in address") {
			// normal
		} else {
			lg(err)
		}
		host = hp
	}

	return host
}
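The missing-port fallback can be exercised directly; a small sketch of net.SplitHostPort for both shapes of input:

package main

import (
	"fmt"
	"net"
	"strings"
)

// hostOnly strips an optional :port, falling back to the raw input
// when no port is present.
func hostOnly(hp string) string {
	host, _, err := net.SplitHostPort(hp)
	if err != nil {
		if !strings.Contains(err.Error(), "missing port") {
			fmt.Println("unexpected:", err)
		}
		return hp
	}
	return host
}

func main() {
	fmt.Println(hostOnly("example.com:8080")) // example.com
	fmt.Println(hostOnly("example.com"))      // example.com
}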
Example #16
func String_to_VVByte(base64_img string) ([][]byte, *bytes.Buffer) {

	lg, _ := loghttp.BuffLoggerUniversal(nil, nil)

	bMsg := new(bytes.Buffer)

	const chunksize = 400 //

	var size_o int
	if len(base64_img)%chunksize == 0 {
		size_o = len(base64_img) / chunksize
	} else {
		size_o = len(base64_img)/chunksize + 1
	}

	VVByte := make([][]byte, size_o)

	cntr := -1
	b := make([]byte, chunksize)
	rdr := strings.NewReader(base64_img)
	for {
		cntr++
		n, err := rdr.Read(b)
		if err == io.EOF {
			break
		}
		lg(err)
		if n < 1 {
			break
		}

		indep_copy := make([]byte, n)
		copy(indep_copy, b)
		VVByte[cntr] = indep_copy

		bMsg.WriteString("reading " + util.Itos(n) + " bytes:\n")
		//bMsg.Write(  VVByte[util.Itos(cntr)]  )
	}

	return VVByte, bMsg

}
Example #17
func incrementBothNamespaces(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	_ = b

	c := appengine.NewContext(r)
	err := agnosticIncrement(c)
	lg(err)

	{
		c, err := appengine.Namespace(c, altNamespace)
		lg(err)
		err = agnosticIncrement(c)
		lg(err)
	}

	s := `counters updated for ns='' and ns='ns01'.` + "\n"
	io.WriteString(w, s)
	readBothNamespaces(w, r, m)

}
Example #18
func TemplateFromHugoPage(w http.ResponseWriter, r *http.Request) string {

	mp.Lock()
	if tpl, ok := mp.mp[pathToTmpl]; ok {
		mp.Unlock()
		return tpl
	}
	mp.Unlock()

	lg, _ := loghttp.BuffLoggerUniversal(w, r)

	//
	fs2 := dsfs.New(
		dsfs.MountName(TplPrefix[1:]),
		dsfs.AeContext(appengine.NewContext(r)),
	)
	fs1.SetOption(
		memfs.ShadowFS(fs2),
	)

	bts, err := fs1.ReadFile(pathToTmpl)
	if err != nil {
		lg(err)
		bts = hugoTplFallback
	}

	bts = bytes.Replace(bts, []byte("[REPLACE_TITLE]"), []byte("{{ .HtmlTitle }}"), -1)
	bts = bytes.Replace(bts, []byte("[REPLACE_DESC]"), []byte("{{ .HtmlDescription }}"), -1)
	bts = bytes.Replace(bts, []byte("</head>"), []byte("{{ .HtmlHeaders }}\n</head>"), -1)
	bts = bytes.Replace(bts, []byte("<p>[REPLACE_CONTENT]</p>"), []byte("{{ .HtmlContent }}"), -1)
	bts = bytes.Replace(bts, []byte("[REPLACE_CONTENT]"), []byte("{{ .HtmlContent }}"), -1)
	bts = bytes.Replace(bts, []byte("<span id='REPLACE_FOOTER'></span>"), []byte("{{ .HtmlFooter }}"), -1)

	mp.Lock()
	mp.mp[pathToTmpl] = string(bts)
	mp.Unlock()

	return mp.mp[pathToTmpl]

}
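The core trick in TemplateFromHugoPage is textual: marker strings inside a pre-rendered Hugo page are swapped for template actions before parsing; a reduced sketch with a made-up page shell:

package main

import (
	"bytes"
	"html/template"
	"os"
)

func main() {
	// A pre-rendered page with plain-text markers instead of template actions.
	page := []byte("<html><head><title>[REPLACE_TITLE]</title></head>" +
		"<body><p>[REPLACE_CONTENT]</p></body></html>")

	// Swap the markers for actions, then parse the result as a template.
	page = bytes.Replace(page, []byte("[REPLACE_TITLE]"), []byte("{{ .HtmlTitle }}"), -1)
	page = bytes.Replace(page, []byte("<p>[REPLACE_CONTENT]</p>"), []byte("{{ .HtmlContent }}"), -1)

	tpl := template.Must(template.New("page").Parse(string(page)))
	tpl.Execute(os.Stdout, map[string]interface{}{
		"HtmlTitle":   "Hello",
		"HtmlContent": template.HTML("<p>body</p>"),
	})
}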
Example #19
func Test2(t *testing.T) {

	start := time.Now()

	lg, b := loghttp.BuffLoggerUniversal(nil, nil)
	_ = b
	// closureOverBuf := func(bUnused *bytes.Buffer) {
	// 	loghttp.Pf(nil, nil, b.String())
	// }
	// defer closureOverBuf(b) // the argument is ignored,

	var c aetest.Context
	if false {
		var err error
		c, err = aetest.NewContext(nil)
		lg(err)
		if err != nil {
			return
		}
		defer c.Close()
	}
	fs := GetFS(c, 2)

	lg("took1 %4.2v secs", time.Now().Sub(start).Seconds())

	least3Files := FetchAndDecodeJSON(nil, URLs[0], "", lg, fs)

	lg("took2 %4.2v secs", time.Now().Sub(start).Seconds())

	doc := Dedup(least3Files, lg, fs)

	fNamer := domclean2.FileNamer(logDir, 0)
	fNamer() // first call yields key
	fsPerm := GetFS(c, 2)
	fileDump(lg, fsPerm, doc, fNamer, "_fin.html")

	pf("MapSimiliarCompares: %v SimpleCompares: %v LevenstheinComp: %v\n", breakMapsTooDistinct, appliedLevenshtein, appliedCompare)
	pf("Finish\n")

}
Example #20
// convert image to string, prepend mime header
//		inspiration_1 http://stackoverflow.com/questions/22945486/golang-converting-image-image-to-byte
//		inspiration_2 https://github.com/polds/imgbase64/blob/master/images.go
//
// mime type is always image/png
// because we only accept *image.RGBA and use image/png Encoder
func Rgba_img_to_base64_str(img *image.RGBA) string {

	lg, b := loghttp.BuffLoggerUniversal(nil, nil)
	_ = b

	// img to bytes
	buf := new(bytes.Buffer)
	err := png.Encode(buf, img)
	lg(err)
	imgBytes := buf.Bytes()

	// binary bytes to base64 bytes
	e64 := base64.StdEncoding
	maxEncLen := e64.EncodedLen(len(imgBytes))
	imgEnc := make([]byte, maxEncLen)
	e64.Encode(imgEnc, imgBytes)

	// base64 bytes to string
	mimeType := "image/png"
	return fmt.Sprintf("data:%s;base64,%s", mimeType, imgEnc)

}
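Encoding and decoding make a convenient round trip; a sketch with a tiny in-memory *image.RGBA, using the same base64.StdEncoding and image/png machinery:

package main

import (
	"bytes"
	"encoding/base64"
	"fmt"
	"image"
	"image/color"
	"image/png"
	"strings"
)

func main() {
	// Build a 2x2 RGBA image and encode it as a data URI.
	src := image.NewRGBA(image.Rect(0, 0, 2, 2))
	src.Set(0, 0, color.RGBA{R: 255, A: 255})

	buf := new(bytes.Buffer)
	if err := png.Encode(buf, src); err != nil {
		fmt.Println(err)
		return
	}
	dataURI := "data:image/png;base64," + base64.StdEncoding.EncodeToString(buf.Bytes())

	// Decode it back, skipping the header up to the comma.
	payload := dataURI[strings.Index(dataURI, ",")+1:]
	img, format, err := image.Decode(base64.NewDecoder(base64.StdEncoding, strings.NewReader(payload)))
	fmt.Println(img.Bounds(), format, err) // (0,0)-(2,2) png <nil>
}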
Example #21
func getHomeTpl(w http.ResponseWriter, r *http.Request) *template.Template {

	lg, _ := loghttp.BuffLoggerUniversal(w, r)

	bstpl := tplx.TemplateFromHugoPage(w, r)

	b := new(bytes.Buffer)

	fmt.Fprintf(b, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		// "HtmlTitle":       "{{ .HtmlTitle }}", // this seems to cause problems sometimes.
		"HtmlTitle":       "Member Area",
		"HtmlDescription": "", // reminder
		"HtmlHeaders":     template.HTML(Headers),
		"HtmlContent":     template.HTML(home1 + "\n<br><br>\n" + home2),
	}))

	intHomeTemplate, err := template.New("home").Parse(b.String())
	lg(err)

	return intHomeTemplate

}
Example #22
func GetWidgetTpl(w http.ResponseWriter, r *http.Request, title string) *template.Template {

	lg, _ := loghttp.BuffLoggerUniversal(w, r)

	bstpl := tplx.TemplateFromHugoPage(w, r) // the jQuery irritates
	// bstpl := tplx.HugoTplNoScript

	b := new(bytes.Buffer)
	fmt.Fprintf(b, tplx.ExecTplHelper(bstpl, map[string]interface{}{
		// "HtmlTitle":       "{{ .HtmlTitle }}",  // it DOES cause some eternal loop. But why only here?
		"HtmlTitle":       title,
		"HtmlDescription": "", // reminder
		"HtmlHeaders":     template.HTML(Headers),
		"HtmlContent":     template.HTML(widgetHTML),
	}))

	intGitkitTemplate, err := template.New("widg").Parse(b.String())
	lg(err)

	return intGitkitTemplate

}
Example #23
func searchRetrieve(w http.ResponseWriter, r *http.Request) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	_ = b

	c := appengine.NewContext(r)
	index, err := search.Open("users")
	lg(err)

	for t := index.Search(c, "Comment:riled", nil); ; {
		var res User
		id, err := t.Next(&res)
		fmt.Fprintf(w, "\n-- ")
		if err == search.Done {
			break
		}
		if err != nil {
			fmt.Fprintf(w, "Search error: %v\n", err)
			break
		}
		fmt.Fprintf(w, "%s -> %#v\n", id, res)
	}
}
Example #24
func confirmPay(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	/*


	   http://abc.de/ef?input_transaction_hash=46178baf7de078954b5aebb71c12120b33d998faac1c165af195eae90f19b25c&shared=false&address=18tpXf8WWuhJP95JbDASbZvavmZJbrydut&destination_address=18tpXf8WWuhJP95JbDASbZvavmZJbrydut&input_address=1ZTnjSdknZvur9Gc73gvB8XBTWL7nV1m6&test=true&anonymous=false&confirmations=0&value=82493362&transaction_hash=46178baf7de078954b5aebb71c12120b33d998faac1c165af195eae90f19b25c
	*/

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,
	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Payment confirmation"}))
	defer wpf(b, tplx.Foot)

	wpf(b, "<pre>")
	defer wpf(b, "</pre>")

	err := r.ParseForm()
	lg(err)

	custSecret := ""
	if r.FormValue("customsecret") != "" {
		custSecret = r.FormValue("customsecret")
	}
	lg("custom secret is %q", custSecret)

	val := ""
	if r.FormValue("value") != "" {
		val = r.FormValue("value")
	}
	lg("value is %q", val)

}
Example #25
//
// https://developers.google.com/identity/choose-auth
// https://developers.google.com/identity/sign-in/web/backend-auth
func TokenSignin(w http.ResponseWriter, r *http.Request) {

	lg, _ := loghttp.BuffLoggerUniversal(w, r)

	// w.Header().Set("Access-Control-Allow-Origin", "http://localhost:1313")

	w.Header().Set("Access-Control-Allow-Origin", "http://"+routes.AppHostDev())

	w.Header().Del("Access-Control-Allow-Origin")
	w.Header().Set("Access-Control-Allow-Origin", "*")

	// err := r.ParseMultipartForm(1024 * 1024 * 2)
	err := r.ParseForm()
	lg(err)

	myToken := r.Form.Get("idtoken")
	tokSize := fmt.Sprintf("Len of Tok was %v. \n", len(myToken))

	fc1 := func(token *jwt.Token) (interface{}, error) {
		// Don't forget to validate the alg is what you expect:

		log.Printf("algo header is %v\n", token.Header["alg"])
		if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok {
			return nil, fmt.Errorf("Unexpected signing method: %v", token.Header["alg"])
		}
		return token.Header["kid"], nil
	}

	token, err := jwt.Parse(myToken, fc1)

	// No direct error comparison possible, since err is wrapped in another struct
	if err != nil && strings.Contains(err.Error(), jwt.ErrPEMMappingObsolete.Error()) {

		currentPEMsURL := "https://www.googleapis.com/oauth2/v1/certs"
		req, err := http.NewRequest("GET", currentPEMsURL, nil)
		if err != nil {
			lg("creation of pem request failed")
			return
		}
		req.Header.Set("Content-Type", "application/json")

		fo := fetch.Options{Req: req}
		fo.KnownProtocol = "https"
		fo.ForceHTTPSEvenOnDevelopmentServer = true
		bts, inf, err := fetch.UrlGetter(r, fo)
		lg(err)
		if err != nil {
			lg("tried to fetch %v, %v", currentPEMsURL, inf.URL)
			lg("msg %v", inf.Msg)
			return
		}
		if len(bts) > 200 {
			var data1 map[string]string
			err = json.Unmarshal(bts, &data1)
			lg(err)
			// lg(stringspb.IndentedDumpBytes(data1))
			// w.Write(stringspb.IndentedDumpBytes(data1))
			if len(data1) > 1 {
				lg("PEM mappings updated")
				jwt.MappingToPEM = data1
			} else {
				lg("PEM mapping response contained only %v records; bytes length %v", len(data1), len(bts))
			}
		}

	}

	token, err = jwt.Parse(myToken, fc1)

	if err != nil && strings.Contains(err.Error(), jwt.ErrInvalidKey.Error()) {
		w.Write([]byte("The submitted RSA Key was somehow unparseable. We still accept the token.\n"))
		/*
			https://developers.google.com/identity/sign-in/web/backend-auth
		*/
		err = nil
		token.Valid = true
	}

	if err != nil {
		w.Write([]byte("--- " + err.Error() + ".\n"))
	}

	if err == nil && token.Valid {

		tk := ""
		tk += fmt.Sprintf("     Algor:     %v\n", token.Method)
		tk += fmt.Sprintf("     Header:    %v\n", token.Header)
		for k, v := range token.Claims {
			tk += fmt.Sprintf("\t  %-8v %v\n", k, v)
		}
		lg(tk)

		w.Write([]byte("tokensignin; valid.   \n"))
		w.Write([]byte(tokSize))
		sb := "header-sub-not-present"
		if _, ok := token.Claims["sub"]; ok {
			sb = token.Claims["sub"].(string)
		}
		w.Write([]byte("ID from PWT is " + sb + "\n"))

		_, usr, msg1 := login.CheckForNormalUser(r)
		if usr != nil {
			w.Write([]byte("ID from SRV is " + usr.ID + "\n"))
		}
		w.Write([]byte(msg1 + "\n"))

	} else {
		w.Write([]byte("tokensignin; INVALID. \n"))
		w.Write([]byte(tokSize))
		w.Write([]byte(stringspb.ToLen(myToken, 30)))

		vrf := fmt.Sprintf("\nhttps://www.googleapis.com/oauth2/v3/tokeninfo?id_token=%v \n", myToken)
		w.Write([]byte(vrf))
	}

}
Example #26
func fetchSimForm(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,

	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	// on live server => always use https
	if r.URL.Scheme != "https" && !util_appengine.IsLocalEnviron() {
		r.URL.Scheme = "https"
		r.URL.Host = r.Host
		lg("lo - redirect %v", r.URL.String())
		http.Redirect(w, r, r.URL.String(), http.StatusFound)
		return // the redirect response is complete
	}

	err := r.ParseForm()
	lg(err)

	rURL := ""
	if r.FormValue(routes.URLParamKey) != "" {
		rURL = r.FormValue(routes.URLParamKey)
	}
	if len(rURL) == 0 {

		wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Find similar HTML URLs"}))
		defer wpf(b, tplx.Foot)

		tm := map[string]string{
			"val":       "www.welt.de/politik/ausland/article146154432/Tuerkische-Bodentruppen-marschieren-im-Nordirak-ein.html",
			"fieldname": routes.URLParamKey,
		}
		tplForm := tt.Must(tt.New("tplName01").Parse(htmlForm))
		tplForm.Execute(b, tm)

	} else {

		fullURL := fmt.Sprintf("https://%s%s?%s=%s&cnt=%s&prot=%s", r.Host, routes.FetchSimilarURI,
			routes.URLParamKey, rURL, r.FormValue("cnt"), r.FormValue("prot"))
		lg("lo - sending to URL 1: %v", fullURL)

		fo := fetch.Options{}
		fo.URL = fullURL
		bts, inf, err := fetch.UrlGetter(r, fo)
		_ = inf
		lg(err)
		if err != nil {
			return
		}

		if len(bts) == 0 {
			lg("empty bts")
			return
		}

		var mp map[string][]byte
		err = json.Unmarshal(bts, &mp)
		lg(err)
		if err != nil {
			lg("%s", bts)
			return
		}

		w.Header().Set("Content-Type", "text/html; charset=utf-8")
		if _, ok := mp["msg"]; ok {
			w.Write(mp["msg"])
		}

		for k, v := range mp {
			if k != "msg" {
				wpf(w, "<br><br>%s:\n", k)
				if true {
					wpf(w, "len %v", len(v))
				} else {
					wpf(w, "%s", html.EscapeString(string(v)))
				}
			}
		}

	}

}
Example #27
func requestPay(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,
	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	protoc := "https://"
	if appengine.IsDevAppServer() {
		protoc = "http://"
	}

	host := appengine.DefaultVersionHostname(appengine.NewContext(r))
	if appengine.IsDevAppServer() {
		host = "not-loclhost"
	}

	confirmURL := fmt.Sprintf("%v%v%v", protoc, host, uriConfirmPayment)
	confirmURL = url.QueryEscape(confirmURL)

	addrURL := fmt.Sprintf("https://%v/api/receive?method=create&address=%v&callback=%v&customsecret=49&api_code=%v",
		blockChainHost, bitCoinAddress, confirmURL, apiKey)

	req, err := http.NewRequest("GET", addrURL, nil)
	lg(err)
	if err != nil {
		return
	}
	bts, inf, err := fetch.UrlGetter(r, fetch.Options{Req: req})
	bts = bytes.Replace(bts, []byte(`","`), []byte(`", "`), -1)

	if err != nil {
		lg(err)
		lg(inf.Msg)
		return
	}

	lg("response body 1:\n")
	lg("%s\n", string(bts))

	lg("response body 2:\n")
	var data1 map[string]interface{}
	err = json.Unmarshal(bts, &data1)
	lg(err)
	lg(stringspb.IndentedDumpBytes(data1))
	// lg("%#v", data1)

	inputAddress, ok := data1["input_address"].(string)
	if !ok {
		lg("input address could not be casted to string; is type %T", data1["input_address"])
		return
	}
	feePercent, ok := data1["fee_percent"].(float64)
	if !ok {
		lg("fee percent could not be casted to float64; is type %T", data1["fee_percent"])
		return
	}

	lg("Input Adress will be %q; fee percent will be %4.2v", inputAddress, feePercent)

}
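The two-value type assertions at the end matter because encoding/json decodes every number into float64 when the target is map[string]interface{}; a compact sketch of that checking pattern on made-up response data:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	raw := []byte(`{"input_address": "1ZTnjSdknZ...", "fee_percent": 1.5}`)

	var data map[string]interface{}
	if err := json.Unmarshal(raw, &data); err != nil {
		fmt.Println(err)
		return
	}

	// Strings arrive as string, numbers as float64; assert with the ok form.
	addr, ok := data["input_address"].(string)
	if !ok {
		fmt.Printf("input_address has unexpected type %T\n", data["input_address"])
		return
	}
	fee, ok := data["fee_percent"].(float64)
	if !ok {
		fmt.Printf("fee_percent has unexpected type %T\n", data["fee_percent"])
		return
	}
	fmt.Printf("address %q, fee %.2f%%\n", addr, fee)
}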
Example #28
// FetchSimilar is an extended version of Fetch
// It uses a DirTree of crawled *links*, not actual files.
// As it moves up the DOM, it crawls every document for additional links.
// It first moves up to find similar URLs on the same depth
//                        /\
//          /\           /  \
//    /\   /  \         /    \
// It then moves up the ladder again - to accept higher URLs
//                        /\
//          /\
//    /\
func FetchSimilar(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,

	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	start := time.Now()

	wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Find similar HTML URLs"}))
	defer wpf(b, tplx.Foot)

	wpf(b, "<pre>")
	defer wpf(b, "</pre>")

	fs1 := GetFS(appengine.NewContext(r))

	err := r.ParseForm()
	lg(err)

	countSimilar := 3
	sCountSimilar := r.FormValue("cnt")
	if sCountSimilar != "" {
		i, err := strconv.Atoi(strings.TrimSpace(sCountSimilar))
		if err == nil {
			countSimilar = i
		}
	}

	surl := r.FormValue(routes.URLParamKey)
	ourl, err := fetch.URLFromString(surl)
	lg(err)
	if err != nil {
		return
	}
	if ourl.Host == "" {
		lg("host is empty (%v)", surl)
		return
	}

	knownProtocol := ""
	if r.FormValue("prot") != "" {
		knownProtocol = r.FormValue("prot")
	}

	numWorkers := 0
	sNumWorkers := r.FormValue("numworkers")
	if sNumWorkers != "" {
		i, err := strconv.Atoi(strings.TrimSpace(sNumWorkers))
		if err == nil {
			numWorkers = i
		}
	}

	srcDepth := strings.Count(ourl.Path, "/")

	cmd := FetchCommand{}
	cmd.Host = ourl.Host
	cmd.SearchPrefix = ourl.Path
	cmd = addDefaults(cmd)

	dirTree := &DirTree{Name: "/", Dirs: map[string]DirTree{}, EndPoint: true}
	fnDigest := path.Join(docRoot, cmd.Host, "digest2.json")
	loadDigest(w, r, lg, fs1, fnDigest, dirTree) // previous
	lg("dirtree 400 chars is %v end of dirtree\t\t", stringspb.ToLen(dirTree.String(), 400))

	m1 := new(MyWorker)
	m1.r = r
	m1.lg = lg
	m1.fs1 = fs1
	m1.SURL = path.Join(cmd.Host, ourl.Path)
	m1.Protocol = knownProtocol
	btsSrc, modSrc, usedExisting, err := fetchSave(m1)
	if !usedExisting {
		addAnchors(lg, cmd.Host, btsSrc, dirTree)
	}
	lg(err)
	if err != nil {
		return
	}

	lg("\t\t%4.2v secs so far 1", time.Now().Sub(start).Seconds())

	var treePath string
	treePath = "/blogs/freeexchange"
	treePath = "/news/europe"
	treePath = path.Dir(ourl.Path)

	opt := LevelWiseDeeperOptions{}
	opt.Rump = treePath
	opt.ExcludeDir = "/news/americas"
	opt.ExcludeDir = "/blogs/buttonwood"
	opt.ExcludeDir = "/something-impossible"
	opt.MinDepthDiff = 1
	opt.MaxDepthDiff = 1
	opt.CondenseTrailingDirs = cmd.CondenseTrailingDirs
	opt.MaxNumber = cmd.DesiredNumber + 1  // one more for "self"
	opt.MaxNumber = cmd.DesiredNumber + 40 // collect more, 'cause we filter out those too old later

	var subtree *DirTree
	links := []FullArticle{}

	alreadyCrawled := map[string]struct{}{}

MarkOuter:
	for j := 0; j < srcDepth; j++ {
		treePath = path.Dir(ourl.Path)
	MarkInner:
		// for i := 1; i < srcDepth; i++ {
		for i := 1; i < (srcDepth + 5); i++ {

			subtree, treePath = DiveToDeepestMatch(dirTree, treePath)

			lg("Looking from height %v to level %v  - %v", srcDepth-i, srcDepth-j, treePath)

			if _, ok := alreadyCrawled[treePath]; ok {
				// lg("\t already digested %v", treePath)
				continue
			}

			m2 := new(MyWorker)
			m2.r = r
			m2.lg = lg
			m2.fs1 = fs1
			m2.SURL = path.Join(cmd.Host, treePath)
			m2.Protocol = knownProtocol

			btsPar, _, usedExisting, err := fetchSave(m2)
			lg(err)
			if err != nil {
				return
			}
			alreadyCrawled[treePath] = struct{}{}
			if !usedExisting {
				addAnchors(lg, cmd.Host, btsPar, dirTree)
			}

			if subtree == nil {
				lg("\n#%v treePath %q ; subtree is nil", i, treePath)
			} else {
				// lg("\n#%v treePath %q ; subtree exists", i, treePath)

				opt.Rump = treePath
				opt.MinDepthDiff = i - j
				opt.MaxDepthDiff = i - j
				lvlLinks := LevelWiseDeeper(nil, nil, subtree, opt)
				links = append(links, lvlLinks...)
				for _, art := range lvlLinks {
					_ = art
					// lg("#%v fnd    %v", i, stringspb.ToLen(art.Url, 100))
				}

				if len(links) >= opt.MaxNumber {
					lg("found enough links")
					break MarkOuter
				}

				pathPrev := treePath
				treePath = path.Dir(treePath)
				// lg("#%v  bef %v - aft %v", i, pathPrev, treePath)

				if pathPrev == "." && treePath == "." ||
					pathPrev == "/" && treePath == "/" ||
					pathPrev == "" && treePath == "." {
					lg("break to innner")
					break MarkInner
				}
			}

		}
	}

	//
	//
	//
	//
	lg("%v links after %4.2v secs", len(links), time.Now().Sub(start).Seconds())

	lg("============================")
	lg("Now reading/fetching actual similar files - not just the links")
	//
	tried := 0
	selecteds := []FullArticle{}

	nonExisting := []FullArticle{}
	nonExistFetched := []FullArticle{}

	for _, art := range links {

		if art.Url == ourl.Path {
			lg("skipping self\t%v", art.Url)
			continue
		}

		tried++

		useExisting := false

		semanticUri := condenseTrailingDir(art.Url, cmd.CondenseTrailingDirs)
		p := path.Join(docRoot, cmd.Host, semanticUri)

		f, err := fs1.Open(p)
		// lg(err) // it's no error if the file does not exist
		if err != nil {
			// lg("!nstore %q", semanticUri)
		} else {
			// lg("reading %q", semanticUri)

			// let's put this into a func, so that f.Close() is called at the end of this inner func;
			// otherwise defer f.Close() would span the entire outer func and prevent
			// overwrites and chmods further down
			f := func() {
				defer f.Close()
				fi, err := f.Stat()
				lg(err)
				if err != nil {

				} else {
					age := time.Now().Sub(fi.ModTime())
					if age.Hours() < 10 {
						lg("\t\tusing existing file with age %4.2v hrs", age.Hours())
						art.Mod = fi.ModTime()
						bts, err := ioutil.ReadAll(f)
						lg(err)
						art.Body = bts
						if len(bts) < 200 {
							if bytes.Contains(bts, []byte(fetch.MsgNoRdirects)) {
								return
							}
						}
						selecteds = append(selecteds, art)
						useExisting = true
					}
				}
			}
			f()

		}

		if !useExisting {
			nonExisting = append(nonExisting, art)
		}

		if len(selecteds) >= countSimilar {
			break
		}

	}
	lg("============================")
	lg("tried %v links - yielding %v existing similars; not existing in datastore: %v, %v were requested.",
		tried, len(selecteds), len(nonExisting), countSimilar)

	if len(selecteds) < countSimilar {
		jobs := make([]distrib.Worker, 0, len(nonExisting))
		for _, art := range nonExisting {
			surl := path.Join(cmd.Host, art.Url)
			wrkr := MyWorker{SURL: surl}
			wrkr.Protocol = knownProtocol
			wrkr.r = r
			wrkr.lg = lg
			wrkr.fs1 = fs1
			job := distrib.Worker(&wrkr)
			jobs = append(jobs, job)
		}

		opt := distrib.NewDefaultOptions()
		opt.TimeOutDur = 3500 * time.Millisecond
		opt.Want = int32(countSimilar - len(selecteds) + 4) // get some more, in case we have "redirected" bodies
		opt.NumWorkers = int(opt.Want)                      // 5s query limit; => hurry; spawn as many as we want
		if numWorkers > 0 {
			opt.NumWorkers = numWorkers
		}
		lg("Preparing %v simultaneous, wanting %v fetches; at %4.2v secs.", opt.NumWorkers, opt.Want, time.Now().Sub(start).Seconds())
		opt.CollectRemainder = false // 5s query limit; => hurry; dont wait for stragglers

		ret, msg := distrib.Distrib(jobs, opt)
		lg("Distrib returned at %4.2v secs with %v results.", time.Now().Sub(start).Seconds(), len(ret))

		lg("\n" + msg.String())
		for _, v := range ret {
			v1, _ := v.Worker.(*MyWorker)
			if v1.FA != nil {
				age := time.Now().Sub(v1.FA.Mod)
				if age.Hours() < 10 {
					lg("\t\tusing fetched file with age %4.2v hrs", age.Hours())
					nonExistFetched = append(nonExistFetched, *v1.FA)
					if len(nonExistFetched) > (countSimilar - len(selecteds)) {
						break
					}
				}
			}
			if v1.err != nil {
				lg(v1.err)
			}
		}

		lg("tried %v links - yielding %v fetched - jobs %v", len(nonExisting), len(nonExistFetched), len(jobs))
		selecteds = append(selecteds, nonExistFetched...)

		//
		//
		// Extract links
		for _, v := range nonExistFetched {
			// lg("links -> memory dirtree for %q", v.Url)
			addAnchors(lg, cmd.Host, v.Body, dirTree)
		}

	}

	//
	if time.Now().Sub(dirTree.LastFound).Seconds() < 10 {
		lg("saving accumulated (new) links to digest")
		saveDigest(lg, fs1, fnDigest, dirTree)
	}

	lg("\t\t%4.2v secs so far 3", time.Now().Sub(start).Seconds())

	mp := map[string][]byte{}
	mp["msg"] = b.Bytes()
	mp["url_self"] = []byte(condenseTrailingDir(ourl.Path, cmd.CondenseTrailingDirs))
	mp["mod_self"] = []byte(modSrc.Format(http.TimeFormat))
	mp["bod_self"] = btsSrc

	for i, v := range selecteds {
		mp["url__"+spf("%02v", i)] = []byte(v.Url)
		mp["mod__"+spf("%02v", i)] = []byte(v.Mod.Format(http.TimeFormat))
		mp["bod__"+spf("%02v", i)] = v.Body
	}

	mp["lensimilar"] = []byte(spf("%02v", len(selecteds)))

	//
	smp, err := json.MarshalIndent(mp, "", "\t")
	if err != nil {
		lg(b, "marshalling mp to []byte failed\n")
		return
	}

	r.Header.Set("X-Custom-Header-Counter", "nocounter")
	w.Header().Set("Content-Type", "application/json")
	w.Write(smp)

	b.Reset()             // this keeps the  buf pointer intact; outgoing defers are still heeded
	b = new(bytes.Buffer) // creates a *new* buf pointer; outgoing defers write into the *old* buf

	lg("\t\t%4.2v secs so far 4 (json resp written as []byte)", time.Now().Sub(start).Seconds())

	return

}
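The MarkOuter/MarkInner termination above relies on path.Dir reaching a fixed point at "/" or "."; a minimal sketch of that climb with a made-up path:

package main

import (
	"fmt"
	"path"
)

func main() {
	// Climb from a leaf path towards the root, stopping when path.Dir
	// stops changing ("/" for rooted paths, "." for relative ones).
	treePath := "/news/europe/some-article"
	for {
		fmt.Println(treePath)
		parent := path.Dir(treePath)
		if parent == treePath {
			break
		}
		treePath = parent
	}
	// Prints: /news/europe/some-article, /news/europe, /news, /
}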
Example #29
// BytesFromFile reads bytes and logs any
// errors even to appengine log.
func BytesFromFile(fn string) []byte {
	lg, _ := loghttp.BuffLoggerUniversal(nil, nil)
	b, err := ioutil.ReadFile(fn)
	lg(err)
	return b
}
Example #30
// dedupHTTP wraps Dedup()
func dedupHTTP(w http.ResponseWriter, r *http.Request, m map[string]interface{}) {

	lg, b := loghttp.BuffLoggerUniversal(w, r)
	closureOverBuf := func(bUnused *bytes.Buffer) {
		loghttp.Pf(w, r, b.String())
	}
	defer closureOverBuf(b) // the argument is ignored,

	r.Header.Set("X-Custom-Header-Counter", "nocounter")

	wpf(b, tplx.ExecTplHelper(tplx.Head, map[string]interface{}{"HtmlTitle": "Deduplicating redundant stuff"}))
	defer wpf(b, tplx.Foot)

	wpf(b, "<pre>")
	defer wpf(b, "</pre>")

	err := r.ParseForm()
	lg(err)

	surl := r.FormValue(routes.URLParamKey)
	ourl, err := fetch.URLFromString(surl)
	lg(err)
	if err != nil {
		return
	}
	if ourl.Host == "" {
		lg("host is empty (%v)", surl)
		return
	}

	knownProtocol := ""
	if r.FormValue("prot") != "" {
		knownProtocol = r.FormValue("prot")
	}

	lg("Host %q, Path %q", ourl.Host, ourl.Path)

	fs := GetFS(appengine.NewContext(r), 0)

	least3Files := FetchAndDecodeJSON(r, ourl.String(), knownProtocol, lg, fs)

	lg("Fetched and decoded; found %v", len(least3Files))
	if len(least3Files) > 0 {
		doc := Dedup(ourl, least3Files, lg, fs)

		fNamer := domclean2.FileNamer(logDir, 0)
		fNamer() // first call yields key
		fsPerm := GetFS(appengine.NewContext(r), 0)
		fileDump(lg, fsPerm, doc, fNamer, "_fin.html")

		lg("MapSimiliarCompares: %v SimpleCompares: %v LevenstheinComp: %v\n", breakMapsTooDistinct, appliedLevenshtein, appliedCompare)
		lg("Finish\n")

		var b2 bytes.Buffer
		err := html.Render(&b2, doc)
		lg(err)
		if err != nil {
			return
		}

		b = new(bytes.Buffer)
		// w.Write([]byte("aa"))
		w.Header().Set("Content-type", "text/html; charset=utf-8")
		w.Write(b2.Bytes())

	}

}