Example #1
func (r *PhantomJSRenderer) extract(stdout io.ReadCloser, s *gryffin.Scan) {

	defer close(r.done)

	dec := json.NewDecoder(stdout)
	for {
		var m message
		// Stop on EOF or on any other decode error; retrying a failed
		// decode would loop forever on malformed renderer output.
		if err := dec.Decode(&m); err != nil {
			return
		}

		if m.responseMessage != nil {
			m.Response.fill(s)
			if s.IsDuplicatedPage() {
				return
			}

			r.chanResponse <- s
			for _, link := range m.Response.Details.Links {
				if newScan := link.toScan(s); newScan != nil && newScan.IsScanAllowed() {
					r.chanLinks <- newScan
				}
			}
		} else if m.domMessage != nil {
			for _, link := range m.domMessage.Links {
				if newScan := link.toScan(s); newScan != nil && newScan.IsScanAllowed() {
					r.chanLinks <- newScan
				}
			}
		}
	}

}
Example #2
func (r *PhantomJSRenderer) extract(stdout io.ReadCloser, s *gryffin.Scan) {

	defer close(r.done)

	dec := json.NewDecoder(stdout)
	for {
		var m message
		// Stop on EOF or on any other decode error; retrying a failed
		// decode would loop forever on malformed renderer output.
		if err := dec.Decode(&m); err != nil {
			return
		}

		if m.responseMessage != nil {
			m.Response.fill(s)
			if s.IsDuplicatedPage() {
				return
			}
			r.chanResponse <- s
			r.parseDetails(&m.Response.Details, s)
		}

		if m.details != nil {
			r.parseDetails(m.details, s)
		}
	}
}
Example #3
func (l *link) toScan(parent *gryffin.Scan) *gryffin.Scan {
	if req, err := http.NewRequest("GET", l.Url, nil); err == nil {
		s := parent.Spawn()
		s.MergeRequest(req)
		return s
	}
	// invalid url
	return nil
}
Example #4
func (s *Fuzzer) Fuzz(g *gryffin.Scan) (count int, err error) {
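	// Dummy fuzzer: run `echo` against the target host and log whether the
	// process exited successfully; it always reports zero findings.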

	cmd := exec.Command("echo", g.Request.URL.Host)
	_, err = cmd.Output()

	g.Logm("Dummy.Scan", fmt.Sprintf("Echo return %t", cmd.ProcessState.Success()))
	return 0, err

}
Example #5
func (s *Fuzzer) extract(g *gryffin.Scan, output string) (count int) {
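	// Walk the fuzzer output line by line and count each "[~] Affected page"
	// line as a finding, logging it under Arachni.Findings.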

	for _, l := range strings.Split(output, "\n") {
		l = strings.TrimSpace(l)
		switch {
		case strings.HasPrefix(l, "[~] Affected page"):
			g.Logm("Arachni.Findings", l)
			count++
		}
	}

	return
}
Example #6
func (s *Fuzzer) extract(g *gryffin.Scan, output string) (count int) {
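	// Walk the sqlmap output line by line and count each "Payload: " line
	// as a finding, logging it under SQLMap.Findings.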

	for _, l := range strings.Split(output, "\n") {
		l = strings.TrimSpace(l)
		switch {
		case strings.HasPrefix(l, "Payload: "):
			g.Logm("SQLMap.Findings", l)
			count++
		}
	}

	return
}
Example #7
func (r *PhantomJSRenderer) Do(s *gryffin.Scan) {

	r.chanResponse = make(chan *gryffin.Scan, 10)
	r.chanLinks = make(chan *gryffin.Scan, 10)
	r.done = make(chan string)

	// Construct the command.
	// render.js http(s)://<host>[:port][/path] [{"method":"post", "data":"a=1&b=2"}]
	url := s.Request.URL.String()
	cookies := make([]string, 0)
	// ua := s.Request.UserAgent()
	ua := "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36"

	for _, c := range s.Cookies {
		cookies = append(cookies, c.String())
	}

	arg := input{
		Method: s.Request.Method,
		Headers: inputHeaders{
			UserAgent: ua,
			Cookie:    strings.Join(cookies, ";"),
		},
	}

	opt, err := json.Marshal(arg)
	if err != nil {
		s.Error("PhantomjsRenderer.Do", err)
		return
	}

	// s.Logmf("PhantomjsRenderer.Do", "Running: render.js %s '%s'", url, string(opt))
	s.Logmf("PhantomjsRenderer.Do", "Running: render.js")
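	// render.js is executed directly from the GOPATH checkout of gryffin;
	// the target URL and the JSON-encoded options are its two arguments.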

	cmd := exec.Command(
		os.Getenv("GOPATH")+"/src/github.com/yahoo/gryffin/renderer/resource/render.js",
		url,
		string(opt))

	stdout, err := cmd.StdoutPipe()
	if err != nil {
		s.Error("PhantomjsRenderer.Do", err)
		return
	}

	if err := cmd.Start(); err != nil {
		s.Error("PhantomjsRenderer.Do", err)
		return
	}

	r.process = cmd.Process

	// wait until done or timeout.
	go r.extract(stdout, s)
	go r.wait(s)

	cmd.Wait()

}
Example #8
func (s *Fuzzer) Fuzz(g *gryffin.Scan) (count int, err error) {

	var cookies []string
	// for _, c := range g.CookieJar.Cookies(g.Request.URL) {
	for _, c := range g.Cookies {
		cookies = append(cookies, c.String())
	}
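	// Arachni flags: XSS checks only, a single page per scan, short
	// timeouts, and reports/snapshots discarded to /dev/null.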

	args := []string{
		"--checks", "xss*",
		"--output-only-positives",
		"--http-request-concurrency", "1",
		"--http-request-timeout", "10000",
		"--timeout", "00:03:00",
		"--scope-dom-depth-limit", "0",
		"--scope-directory-depth-limit", "0",
		"--scope-page-limit", "1",
		"--audit-with-both-methods",
		"--report-save-path", "/dev/null",
		"--snapshot-save-path", "/dev/null",
	}

	// TODO: Post method

	// Cookie
	if len(cookies) > 0 {
		args = append(args, "--http-cookie-string", strings.Join(cookies, ";"))
	}

	args = append(args, g.Request.URL.String())

	cmd := exec.Command("arachni", args...)

	g.Logm("Arachni.Scan", fmt.Sprintf("Run as %s", cmd.Args))

	output, err := cmd.Output()

	count = s.extract(g, string(output))

	if err != nil {
		return
	}

	g.Logm("Arachni.Scan", fmt.Sprintf("Arachni return %t", cmd.ProcessState.Success()))
	return

}
Example #9
func (f *form) toScan(parent *gryffin.Scan) *gryffin.Scan {
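	// POST form data is sent as the request body; for any other method the
	// data is appended to the URL with "&".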
	m := strings.ToUpper(f.Method)
	u := f.Url
	var r io.Reader
	if m == "POST" {
		r = ioutil.NopCloser(strings.NewReader(f.Data))
	} else {
		u += "&" + f.Data
	}

	if req, err := http.NewRequest(m, u, r); err == nil {
		s := parent.Spawn()
		s.MergeRequest(req)
		return s
	}
	// invalid url
	return nil
}
Example #10
func (m *response) fill(s *gryffin.Scan) {

	/*
	   {"response":{"headers":{"Date":["Thu, 30 Jul 2015 00:13:43 GMT"],"Set-Cookie":["B=82j3nrdarir1n&b=3&s=23; expires=Sun, 30-Jul-2017 00:13:43 GMT; path=/; domain=.yahoo.com"]

	*/
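	// Rebuild a minimal *http.Response from the renderer's JSON message so
	// the scan can read headers and body as if they came from net/http.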
	resp := &http.Response{
		Request:    s.Request,
		StatusCode: m.Status,
		Status:     strconv.FormatInt(int64(m.Status), 10),
		Proto:      "HTTP/1.1",
		ProtoMajor: 1,
		ProtoMinor: 1,
		Header:     m.Headers,
		Body:       noCloseReader{strings.NewReader(m.Body)},
	}

	s.Response = resp
	s.ReadResponseBody()

}
Example #11
func (r *PhantomJSRenderer) kill(reason string, s *gryffin.Scan) {
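	// Best-effort kill: termination is only logged when Kill succeeds, so
	// it stays silent if the process has already exited.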
	if err := r.process.Kill(); err == nil {
		s.Logmf("PhantomjsRenderer.Do", "[%s] Terminating the crawl process.", reason)
	}
}
Example #12
func (r *PhantomJSRenderer) Do(s *gryffin.Scan) {

	r.chanResponse = make(chan *gryffin.Scan, 10)
	r.chanLinks = make(chan *gryffin.Scan, 10)

	// Construct the command.
	// render.js http(s)://<host>[:port][/path] [{"method":"post", "data":"a=1&b=2"}]
	url := s.Request.URL.String()
	cookies := make([]string, 0)
	// ua := s.Request.UserAgent()
	ua := "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36"

	for _, c := range s.Cookies {
		cookies = append(cookies, c.String())
	}

	arg := input{
		Method: s.Request.Method,
		Headers: inputHeaders{
			UserAgent: ua,
			Cookie:    strings.Join(cookies, ";"),
		},
	}

	opt, err := json.Marshal(arg)
	if err != nil {
		s.Error("PhantomjsRenderer.Do", err)
		return
	}

	// s.Logmf("PhantomjsRenderer.Do", "Running: render.js %s '%s'", url, string(opt))
	s.Logmf("PhantomjsRenderer.Do", "Running: render.js")

	cmd := exec.Command(
		os.Getenv("GOPATH")+"/src/github.com/yahoo/gryffin/renderer/resource/render.js",
		url,
		string(opt))

	stdout, err := cmd.StdoutPipe()
	if err != nil {
		s.Error("PhantomjsRenderer.Do", err)
		return
	}

	if err := cmd.Start(); err != nil {
		s.Error("PhantomjsRenderer.Do", err)
		return
	}

	kill := func(reason string) {
		if err := cmd.Process.Kill(); err != nil {
			// TODO - forgive "os: process already finished"
			s.Error("PhantomjsRenderer.Do", err)
			// log.Printf("error: %s", err)
		} else {
			s.Logmf("PhantomjsRenderer.Do", "[%s] Terminating the crawl process.", reason)
		}
	}
	// Kill the rendering process when the timeout expires.
	if r.Timeout != 0 {
		timeout := func() {
			<-time.After(time.Duration(r.Timeout) * time.Second)
			kill("Timeout")
		}
		go timeout()
	}

	crawl := func() {
		defer close(r.chanResponse)
		defer close(r.chanLinks)

		dec := json.NewDecoder(stdout)

		for {
			var m message
			// Break on EOF or on any other decode error so the renderer
			// process gets reaped below; retrying a failed decode would
			// loop forever on malformed output.
			if err := dec.Decode(&m); err != nil {
				break
			}

			if m.responseMessage != nil {
				m.Response.fill(s)
				if s.IsDuplicatedPage() {
					kill("Duplicated")
					return
				}
				s.Logm("PhantomjsRenderer.Do.UniqueCrawl", m.MsgType)
				r.chanResponse <- s
				for _, link := range m.Response.Details.Links {
					if newScan := link.toScan(s); newScan != nil && newScan.IsScanAllowed() {
						r.chanLinks <- newScan
					}
				}
			} else if m.domMessage != nil {
				for _, link := range m.domMessage.Links {
					if newScan := link.toScan(s); newScan != nil && newScan.IsScanAllowed() {
						r.chanLinks <- newScan
					}
				}
			}
		}

		cmd.Wait()
	}

	go crawl()

}
Example #13
func (s *Fuzzer) Fuzz(g *gryffin.Scan) (count int, err error) {

	var cookies []string

	// for _, c := range g.CookieJar.Cookies(g.Request.URL) {
	for _, c := range g.Cookies {
		cookies = append(cookies, c.String())
	}
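	// sqlmap runs non-interactively (--batch) with crawling disabled and the
	// lowest test level and risk, so only the given URL is probed.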

	args := []string{
		"--batch",
		"--timeout=2",
		"--retries=3",
		"--crawl=0",
		"--disable-coloring",
		"-o",
		"--text-only",
		// "--threads=4",
		"-v", "0",
		"--level=1",
		"--risk=1",
		"--smart",
		"--fresh-queries",
		"--purge-output",
		"--os=Linux",
		"--dbms=MySQL",
		"--delay=0.1",
		"--time-sec=1",
	}

	// TODO: Post method
	// if g.RequestBody != "" {
	// args = append(args, fmt.Sprintf("--data=..."
	// }

	// Test only parameters whose values parse as integers (integer-based injection).
	var testable []string
	for k, vs := range g.Request.URL.Query() {
		for _, v := range vs {
			_, err := strconv.ParseInt(v, 10, 64)
			if err == nil {
				// query param value is an integer
				testable = append(testable, k)
			}
		}
	}
	if len(testable) > 0 {
		args = append(args, "-p", strings.Join(testable, ","))
	}

	// Cookie
	if len(cookies) > 0 {
		fmt.Println(cookies)
		args = append(args, "--cookie", strings.Join(cookies, ";"))
	}

	args = append(args, "-u", g.Request.URL.String())

	cmd := exec.Command("sqlmap", args...)

	g.Logm("SQLMap.Scan", fmt.Sprintf("Run as %s", cmd.Args))

	output, err := cmd.Output()

	if err != nil {
		return
	}

	count = s.extract(g, string(output))

	g.Logm("SQLMap.Scan", fmt.Sprintf("SQLMap return %t", cmd.ProcessState.Success()))
	return

}
Example #14
func (r *NoScriptRenderer) Do(s *gryffin.Scan) {
	r.chanResponse = make(chan *gryffin.Scan, 10)
	r.chanLinks = make(chan *gryffin.Scan, 10)

	crawl := func() {
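		// Fetch the page with a plain HTTP client (no JavaScript), emit the
		// response, then tokenize the HTML and queue every allowed <a href> link.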

		defer close(r.chanResponse)
		defer close(r.chanLinks)

		client := &http.Client{}

		client.Timeout = time.Duration(3) * time.Second

		if response, err := client.Do(s.Request); err == nil {
			s.Response = response
		} else {
			s.Logm("NoScriptRenderer", fmt.Sprintf("error in building request: %s", err))
			return
		}

		s.ReadResponseBody()

		if s.IsDuplicatedPage() {
			return
		}

		tokenizer := html.NewTokenizer(strings.NewReader(s.ResponseBody))

		r.chanResponse <- s

		for {
			t := tokenizer.Next()

			switch t {

			case html.ErrorToken:
				return

			case html.StartTagToken:
				token := tokenizer.Token()
				if token.DataAtom.String() == "a" {
					for _, attr := range token.Attr {
						if attr.Key == "href" {
							link := s.Spawn()
							// TODO: decide how to handle relative URLs. Filtering on
							// req.URL.IsAbs() would drop bare "#" hrefs, but it would
							// also drop genuine relative links, so for now every href
							// is handled as-is.
							if req, err := http.NewRequest("GET", attr.Val, nil); err == nil {
								link.MergeRequest(req)
								if link.IsScanAllowed() {
									r.chanLinks <- link
								}
							} else {
								log.Printf("error in building request: %s", err)
							}
						}
					}
				}
			}
		}

		// parse and find links.

	}

	go crawl()

	return
}