func grabData() {
	resp, err := http.Get(redditURL)
	if err != nil {
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return
	}
	r := new(redditResponse)
	err = json.NewDecoder(resp.Body).Decode(r)
	if err != nil {
		return
	}
	data = make([]item, len(r.Data.Children))
	for i, child := range r.Data.Children {
		if !govalidator.IsURL(child.Data.URL) {
			continue
		}
		if !govalidator.IsURL(child.Data.Thumbnail) {
			child.Data.Thumbnail = ""
		}
		data[i] = child.Data
	}
}
func encodeHandler(response http.ResponseWriter, request *http.Request, db Database, baseURL string) {
	decoder := json.NewDecoder(request.Body)
	var data struct {
		URL string `json:"url"`
	}
	err := decoder.Decode(&data)
	if err != nil {
		http.Error(response, `{"error": "Unable to parse json"}`, http.StatusBadRequest)
		return
	}
	if !govalidator.IsURL(data.URL) {
		http.Error(response, `{"error": "Not a valid URL"}`, http.StatusBadRequest)
		return
	}
	id, err := db.Save(data.URL)
	if err != nil {
		log.Println(err)
		return
	}
	resp := map[string]string{
		"url":   strings.Replace(path.Join(baseURL, encode(id)), ":/", "://", 1),
		"id":    encode(id),
		"error": "",
	}
	jsonData, _ := json.Marshal(resp)
	response.Write(jsonData)
}
// ConvertToModel implements the FieldType interface
func (fieldType SimpleType) ConvertToModel(value interface{}) (interface{}, error) {
	if value == nil {
		return nil, nil
	}
	valueType := reflect.TypeOf(value)
	switch fieldType.GetKind() {
	case KindString, KindUser, KindIteration, KindArea:
		if valueType.Kind() != reflect.String {
			return nil, fmt.Errorf("value %v should be %s, but is %s", value, "string", valueType.Name())
		}
		return value, nil
	case KindURL:
		if valueType.Kind() == reflect.String && govalidator.IsURL(value.(string)) {
			return value, nil
		}
		return nil, fmt.Errorf("value %v should be %s, but is %s", value, "URL", valueType.Name())
	case KindFloat:
		if valueType.Kind() != reflect.Float64 {
			return nil, fmt.Errorf("value %v should be %s, but is %s", value, "float64", valueType.Name())
		}
		return value, nil
	case KindInteger, KindDuration:
		if valueType.Kind() != reflect.Int {
			return nil, fmt.Errorf("value %v should be %s, but is %s", value, "int", valueType.Name())
		}
		return value, nil
	case KindInstant:
		// instant == milliseconds
		if !valueType.Implements(timeType) {
			return nil, fmt.Errorf("value %v should be %s, but is %s", value, "time.Time", valueType.Name())
		}
		return value.(time.Time).UnixNano(), nil
	case KindWorkitemReference:
		if valueType.Kind() != reflect.String {
			return nil, fmt.Errorf("value %v should be %s, but is %s", value, "string", valueType.Name())
		}
		idValue, err := strconv.Atoi(value.(string))
		return idValue, errors.WithStack(err)
	case KindList:
		if (valueType.Kind() != reflect.Array) && (valueType.Kind() != reflect.Slice) {
			return nil, fmt.Errorf("value %v should be %s, but is %s", value, "array/slice", valueType.Kind())
		}
		return value, nil
	case KindEnum:
		// to be done yet | not sure what to write here as of now.
		return value, nil
	case KindMarkup:
		// 'markup' is just a string in the API layer for now:
		// it corresponds to the MarkupContent.Content field. The MarkupContent.Markup is set to the default value.
		switch value.(type) {
		case rendering.MarkupContent:
			markupContent := value.(rendering.MarkupContent)
			return markupContent.ToMap(), nil
		default:
			return nil, errors.Errorf("value %v should be %s, but is %s", value, "MarkupContent", valueType)
		}
	default:
		return nil, errors.Errorf("unexpected type constant: '%s'", fieldType.GetKind())
	}
}
func (site Site) saveShort(url string) (shortest string, err error) {
	if !govalidator.IsURL(url) {
		return "", errors.New("invalid url")
	}
	redisdb := site.redisdb()
	defer redisdb.Close()

	hash := fmt.Sprintf("%x", md5.Sum([]byte(url)))
	similar, _ := redis.String(redisdb.Do("GET", "i:"+hash))
	if similar != "" {
		return site.Host + similar, nil
	}

	for hashShortestLen := 1; hashShortestLen <= 32; hashShortestLen++ {
		s, _ := redisdb.Do("GET", hash[0:hashShortestLen])
		if s == nil {
			shortest = hash[0:hashShortestLen]
			break
		}
	}
	if shortest == "" {
		return "", errors.New("url shortening failed")
	}

	redisdb.Do("SET", shortest, url)
	redisdb.Do("SET", "i:"+hash, shortest)
	return site.Host + shortest, nil
}
// Spawn initializes the HTTP component
func (httpsender *HTTPSender) Spawn(id int) utils.Composer {
	s := *httpsender
	s.id = id

	if httpsender.Config.Logger == nil {
		s.logger = logrus.NewEntry(logrus.New())
		s.logger.Logger.Out = ioutil.Discard
	} else {
		s.logger = httpsender.Config.Logger.WithFields(logrus.Fields{
			"worker": id,
		})
	}
	if httpsender.Debug {
		s.logger.Logger.Level = logrus.DebugLevel
	}
	s.logger.Debugf("Spawning worker")

	if govalidator.IsURL(s.URL) {
		s.Client = new(http.Client)
		if httpsender.Config.Insecure {
			s.Client.Transport = &http.Transport{
				TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
			}
		}
	} else {
		s.err = errors.New("Invalid URL")
	}

	return &s
}
func parsePrometheusURL() error {
	if cfg.prometheusURL == "" {
		hostname, err := os.Hostname()
		if err != nil {
			return err
		}
		_, port, err := net.SplitHostPort(cfg.web.ListenAddress)
		if err != nil {
			return err
		}
		cfg.prometheusURL = fmt.Sprintf("http://%s:%s/", hostname, port)
	}

	if ok := govalidator.IsURL(cfg.prometheusURL); !ok {
		return fmt.Errorf("Invalid Prometheus URL: %s", cfg.prometheusURL)
	}

	promURL, err := url.Parse(cfg.prometheusURL)
	if err != nil {
		return err
	}
	cfg.web.ExternalURL = promURL

	ppref := strings.TrimRight(cfg.web.ExternalURL.Path, "/")
	if ppref != "" && !strings.HasPrefix(ppref, "/") {
		ppref = "/" + ppref
	}
	cfg.web.ExternalURL.Path = ppref
	return nil
}
func shitbucketImportHandler(w http.ResponseWriter, r *http.Request) error {
	if r.Method == "GET" {
		ctx := context.Get(r, TemplateContext).(map[string]interface{})
		ctx["Title"] = "Import"
		return renderTemplate(w, "shitbucket-import", ctx)
	}
	if err := r.ParseForm(); err != nil {
		return err
	}
	url := r.PostForm["url"][0]
	session, err := store.Get(r, "flashes")
	if err != nil {
		return err
	}
	if !govalidator.IsURL(url) {
		if url == "" {
			session.AddFlash("URL cannot be blank", "danger")
		} else {
			session.AddFlash(fmt.Sprintf("%s is not a valid URL", url), "danger")
		}
		session.Save(r, w)
		// Redirect back instead of falling through and fetching an invalid URL.
		http.Redirect(w, r, reverse("shitbucket-import"), http.StatusSeeOther)
		return nil
	}
	res, err := http.Get(url)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		session.AddFlash(fmt.Sprintf("%s did not return a 200 status code", url), "danger")
		session.Save(r, w)
		http.Redirect(w, r, reverse("shitbucket-import"), http.StatusSeeOther)
		return nil
	}
	content, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return err
	}
	count, err := shitbucketImporter(content)
	if err != nil {
		session.AddFlash(fmt.Sprintf("There was an error importing: %s", err), "danger")
		session.Save(r, w)
		http.Redirect(w, r, reverse("shitbucket-import"), http.StatusSeeOther)
		return nil
	}
	session.AddFlash(fmt.Sprintf("Successfully added %d URLs from %s", count, url), "success")
	session.Save(r, w)
	http.Redirect(w, r, reverse("shitbucket-import"), http.StatusSeeOther)
	return nil
}
func validateURLs(urls []string) string {
	for _, curr := range urls {
		if !govalidator.IsURL(curr) {
			return curr
		}
	}
	return ""
}
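// Usage sketch (not in the original source): callers treat the empty string as
// "all URLs valid" and a non-empty return value as the first offender. The
// slice and the log call below are hypothetical.
//
//	if bad := validateURLs([]string{"https://example.com", "not a url"}); bad != "" {
//		log.Fatalf("invalid URL: %q", bad)
//	}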
func (n *NewChannel) Decode() error {
	n.URL = strings.Trim(n.URL, " ")
	if n.URL == "" || !govalidator.IsURL(n.URL) {
		return Errors{
			"url": "Valid URL is required",
		}
	}
	return nil
}
// CanHandle tells if the URL can be handled by this resolver
func (gh *GithubFetcher) CanHandle(kubeware string) bool {
	if !govalidator.IsURL(kubeware) {
		return false
	}
	kubewareURL, err := url.Parse(kubeware)
	if err != nil {
		return false
	}
	return gh.canHandleURL(kubewareURL)
}
func urlSubmitHandler(w http.ResponseWriter, r *http.Request) error {
	if err := r.ParseForm(); err != nil {
		return err
	}
	uschema := &URLSchema{}
	decoder := schema.NewDecoder()
	if err := decoder.Decode(uschema, r.PostForm); err != nil {
		return err
	}
	urlstring := uschema.URL
	tagsstring := uschema.Tags
	private := uschema.Private
	session, err := store.Get(r, "flashes")
	if err != nil {
		return err
	}
	if !govalidator.IsURL(urlstring) {
		errormessage := "URL is required"
		if urlstring != "" {
			errormessage = fmt.Sprintf("URL \"%s\" is not valid", urlstring)
		}
		session.AddFlash(errormessage, "danger")
		session.Save(r, w)
		http.Redirect(w, r, reverse("url-new"), http.StatusSeeOther)
		return nil
	}
	title, err := getPageTitle(urlstring)
	if err != nil {
		// <strike>Add flash about title not being fetchable</strike>
		// or alternatively add logic for detecting content type because it might be
		// an image or PDF
		session.AddFlash("Sorry! Could not fetch the page title!", "danger")
		session.Save(r, w)
	}
	url := &URL{
		URL:       urlstring,
		Title:     title,
		Private:   private,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	err = url.SaveWithTags(tagsstring)
	if err != nil {
		return err
	}
	http.Redirect(w, r, reverse("url-view", "id", url.ID), http.StatusSeeOther)
	return nil
}
// validCallBackURL validates the callback URL. The Mpesa system does not check
// this itself; the check is added here as a convenience.
func validCallBackURL(url string) validator {
	return func() *ProcessCheckoutResponse {
		if !govalidator.IsURL(url) {
			resp := new(ProcessCheckoutResponse)
			resp.ReturnCode = missingParameters
			resp.Description = "Invalid URL"
			resp.TransactionID = bson.NewObjectId().Hex()
			return resp
		}
		return nil
	}
}
func (ui *tatui) setTatWebUIURL(str string) {
	str = strings.Replace(str, "/set-tatwebui-url ", "", 1)
	if str == "" {
		return
	}

	validURL := govalidator.IsURL(str)
	if !validURL {
		ui.msg.Text = "You entered an invalid URL"
		ui.render()
		return
	}

	viper.Set("tatwebui-url", str)
	ui.saveConfig()
}
func buildCatalogCreationCheckRequest(repo string, branch string, number int, token string) *http.Request {
	url := fmt.Sprintf("https://api.github.com/repos/%s/contents/templates/%s/%d", repo, branch, number)
	if !govalidator.IsURL(url) {
		return nil
	}
	request, err := http.NewRequest("GET", url, nil)
	if err != nil {
		fmt.Print(err.Error())
		return nil
	}
	request.SetBasicAuth(token, "x-oauth-basic")
	request.Close = true
	return request
}
// Edits an existing Website in the database.
func ApiWebsitesEdit(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	if !lib.IsLoggedIn(r) {
		SendJsonMessage(w, http.StatusUnauthorized, false, "Unauthorized.")
		return
	}

	// Get data from Request
	r.ParseForm()
	oldUrl := ps.ByName("url")
	name := r.Form.Get("name")
	protocol := r.Form.Get("protocol")
	url := r.Form.Get("url")
	method := r.Form.Get("checkMethod")

	// Simple Validation
	if oldUrl == "" || name == "" || protocol == "" || url == "" || method == "" {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit valid values.")
		return
	}
	if protocol != "http" && protocol != "https" {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit a valid protocol.")
		return
	}
	if !govalidator.IsURL(protocol + "://" + url) {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit a valid url.")
		return
	}
	if method != "HEAD" && method != "GET" {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit a valid check method.")
		return
	}

	// Update Database
	db := lib.GetDatabase()
	res, err := db.Exec("UPDATE websites SET name = ?, protocol = ?, url = ?, checkMethod = ? WHERE url = ?;", name, protocol, url, method, oldUrl)
	if err != nil {
		logging.MustGetLogger("").Error("Unable to edit Website: ", err)
		SendJsonMessage(w, http.StatusInternalServerError, false, "Unable to process your Request: "+err.Error())
		return
	}

	// Check if exactly one Website has been edited
	rowsAffected, _ := res.RowsAffected()
	if rowsAffected == 1 {
		SendJsonMessage(w, http.StatusOK, true, "")
	} else {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Could not edit Website.")
	}
}
func validateAlertmanagerURL() error {
	if cfg.notifier.AlertmanagerURL == "" {
		return nil
	}
	if ok := govalidator.IsURL(cfg.notifier.AlertmanagerURL); !ok {
		return fmt.Errorf("invalid Alertmanager URL: %s", cfg.notifier.AlertmanagerURL)
	}
	url, err := url.Parse(cfg.notifier.AlertmanagerURL)
	if err != nil {
		return err
	}
	if url.Scheme == "" {
		return fmt.Errorf("missing scheme in Alertmanager URL: %s", cfg.notifier.AlertmanagerURL)
	}
	return nil
}
func validateAlertmanagerURL(u string) error {
	if u == "" {
		return nil
	}
	if ok := govalidator.IsURL(u); !ok {
		return fmt.Errorf("invalid Alertmanager URL: %s", u)
	}
	url, err := url.Parse(u)
	if err != nil {
		return err
	}
	if url.Scheme == "" {
		return fmt.Errorf("missing scheme in Alertmanager URL: %s", u)
	}
	return nil
}
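// A minimal table-driven test sketch for the parameterized variant above (not
// part of the original source; requires the standard testing package). The
// expectations assume govalidator's default IsURL behavior: scheme-less hosts
// pass IsURL but are then rejected by the url.Scheme == "" check.
func TestValidateAlertmanagerURL(t *testing.T) {
	cases := []struct {
		in      string
		wantErr bool
	}{
		{"", false},                                 // empty URL is explicitly allowed
		{"https://alertmanager.example.com", false}, // scheme plus host passes all checks
		{"alertmanager.example.com", true},          // no scheme: rejected by the Scheme check
		{"not a url", true},                         // rejected by govalidator.IsURL
	}
	for _, c := range cases {
		if err := validateAlertmanagerURL(c.in); (err != nil) != c.wantErr {
			t.Errorf("validateAlertmanagerURL(%q) = %v, wantErr %v", c.in, err, c.wantErr)
		}
	}
}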
func parseInfluxdbURL() error {
	if cfg.influxdbURL == "" {
		return nil
	}
	if ok := govalidator.IsURL(cfg.influxdbURL); !ok {
		return fmt.Errorf("Invalid InfluxDB URL: %s", cfg.influxdbURL)
	}
	url, err := url.Parse(cfg.influxdbURL)
	if err != nil {
		return err
	}
	cfg.remote.InfluxdbURL = url
	return nil
}
// parseSearchString accepts a raw string and generates a searchKeyword object
func parseSearchString(rawSearchString string) (searchKeyword, error) {
	// TODO remove special characters and exclamations if any
	rawSearchString = strings.Trim(rawSearchString, "/") // get rid of trailing slashes
	rawSearchString = strings.Trim(rawSearchString, "\"")
	parts := strings.Fields(rawSearchString)
	var res searchKeyword
	for _, part := range parts {
		// QueryUnescape is required in case of encoded URL strings and does not
		// harm regular search strings. This processing is required because at this
		// point we do not know whether the search input is a regular string or a URL.
		part, err := url.QueryUnescape(part)
		if err != nil {
			log.Warn(nil, map[string]interface{}{
				"pkg":  "search",
				"part": part,
			}, "unable to escape url!")
		}
		// If part is a search by ID, e.g. id:1234
		// TODO: need to find out the way to use ID fields.
		if strings.HasPrefix(part, "id:") {
			res.id = append(res.id, strings.TrimPrefix(part, "id:")+":*A")
		} else if strings.HasPrefix(part, "type:") {
			typeName := strings.TrimPrefix(part, "type:")
			if len(typeName) == 0 {
				return res, errors.NewBadParameterError("Type name must not be empty", part)
			}
			res.workItemTypes = append(res.workItemTypes, typeName)
		} else if govalidator.IsURL(part) {
			part := strings.ToLower(part)
			part = trimProtocolFromURLString(part)
			searchQueryFromURL := getSearchQueryFromURLString(part)
			res.words = append(res.words, searchQueryFromURL)
		} else {
			part := strings.ToLower(part)
			part = sanitizeURL(part)
			res.words = append(res.words, part+":*")
		}
	}
	return res, nil
}
// Inserts a new Website into the database.
func ApiWebsitesAdd(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	if !lib.IsLoggedIn(r) {
		SendJsonMessage(w, http.StatusUnauthorized, false, "Unauthorized.")
		return
	}

	// Get data from Request
	r.ParseForm()
	name := r.Form.Get("name")
	protocol := r.Form.Get("protocol")
	url := ps.ByName("url")
	method := r.Form.Get("checkMethod")

	// Simple Validation
	if name == "" || protocol == "" || url == "" || method == "" {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit valid values.")
		return
	}
	if protocol != "http" && protocol != "https" {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit a valid protocol.")
		return
	}
	if !govalidator.IsURL(protocol + "://" + url) {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit a valid url.")
		return
	}
	if method != "HEAD" && method != "GET" {
		SendJsonMessage(w, http.StatusBadRequest, false, "Unable to process your Request: Submit a valid check method.")
		return
	}

	// Insert into Database
	db := lib.GetDatabase()
	_, err := db.Exec("INSERT INTO websites (name, protocol, url, checkMethod) VALUES (?, ?, ?, ?);", name, protocol, url, method)
	if err != nil {
		logging.MustGetLogger("").Error("Unable to add Website: ", err)
		SendJsonMessage(w, http.StatusInternalServerError, false, "Unable to process your Request: "+err.Error())
		return
	}
	SendJsonMessage(w, http.StatusOK, true, "")
}
func main() {
	// Parse CLI options
	startAddress := flag.String("start", "http://www.lemonde.fr/", "address to start from")
	recursionLevel := flag.Int("levels", 50, "depth of the web crawl")
	nbConcurrentFetchersArg := flag.Int("concurrentFetchers", 30, "number of fetchers to run concurrently")
	keepFragment := flag.Bool("fragment", false, "keep the fragment part of an URL. Example #top")
	keepQuery := flag.Bool("query", false, "keep the query part of an URL. Example ?foo=bar")
	stayOnDomain := flag.Bool("stayOnDomain", true, "do not crawl resources that are stored on another domain.")
	savePath := flag.String("savePath", "/tmp/crawl/", "where to save crawled resources")
	flag.Parse()

	// Basic validation of CLI arguments
	if !govalidator.IsURL(*startAddress) {
		panic("Expected a valid URL to start from")
	}
	depth := max(2, *recursionLevel)
	nbConcurrentFetchers := max(1, *nbConcurrentFetchersArg)

	// Create and launch the crawler
	fetcher := fetchers.NewWeb(*keepFragment, *keepQuery, *stayOnDomain)
	processor := processors.NewSaver(*savePath)
	crawler := crawlers.NewCrawler(fetcher, processor, nbConcurrentFetchers)
	crawler.Crawl(*startAddress, depth)
}
func addRss(m Message, ws *websocket.Conn) {
	feedUrl := getArgument(m, 3, ws)
	if feedUrl != "" {
		feedUrl = feedUrl[1 : len(feedUrl)-1]
		if govalidator.IsURL(feedUrl) {
			channelFeeds := feeds[m.Channel]
			for _, f := range channelFeeds {
				if f.Url == feedUrl {
					sendMessage(m.Channel, "Already tracking that url here, no worries homie :snowboarder:", ws)
					return
				}
			}
			var newFeed FeedBundle
			var err error
			newFeed.Url = feedUrl
			newFeed.RssHandler, err = rss.Fetch(feedUrl)
			if err != nil {
				sendMessage(m.Channel, "Incorrect url :OoOoOoOo", ws)
			} else {
				createNewFeedChecker := false
				if _, exists := feeds[m.Channel]; !exists {
					createNewFeedChecker = true
				}
				feeds[m.Channel] = append(feeds[m.Channel], newFeed)
				sendMessage(m.Channel, "Yes, sir. "+feedUrl+" added.", ws)
				if createNewFeedChecker {
					go checkFeed(m.Channel, feeds[m.Channel], ws)
				}
			}
		} else {
			sendMessage(m.Channel, "Incorrect url :OoOoOoOo", ws)
		}
	}
}
func (i IsURLChecker) IsFormat(data string) bool {
	return govalidator.IsURL(data)
}
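// Registration sketch (not in the original source): if IsURLChecker is meant to
// satisfy gojsonschema's FormatChecker interface (older releases of
// github.com/xeipuuv/gojsonschema used a string argument for IsFormat), it could
// be registered once so that schemas declaring `"format": "url"` are validated
// with govalidator. The format name "url" is an assumption here.
//
//	import "github.com/xeipuuv/gojsonschema"
//
//	func init() {
//		gojsonschema.FormatCheckers.Add("url", IsURLChecker{})
//	}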
func fetch(url string, mode bool) {
	var ip string
	file := "report.txt"
	if mode {
		fmt.Print("Analyzing URL(s):\n")
		fmt.Print("Resolving URL:")
	}
	if govalidator.IsURL(url) {
		ipAddr, err := net.LookupIP(url)
		if mode {
			fmt.Print("Finished Domain Lookup\n")
		}
		if err != nil {
			fmt.Printf("ip lookup failed %s %v\n", ipAddr, err)
		}
		for i := 0; i < len(ipAddr); i++ {
			ip = ipAddr[i].String()
		}
		if mode {
			fmt.Print("Sending to VirusTotal: Awaiting Results\n")
		}
		if ip == "" {
			fmt.Println("-ip=<ip> is missing!")
			os.Exit(0)
		}
		c := govt.Client{Apikey: apikey, Url: apiurl}
		// get a file report
		r, err := c.GetIpReport(ip)
		check(err)
		j, err := json.MarshalIndent(r, "", " ")
		if err != nil {
			fmt.Println("Formatting Error")
			return
		}
		// currDir, err := os.Getwd()
		if _, err := os.Stat(file); err == nil {
			if mode {
				fmt.Println("File Exists Moving to Reports Directory")
			}
			os.Mkdir("report", 0760)
			ioutil.WriteFile("report/"+url+"-report", j, 0664)
		} else {
			ioutil.WriteFile("report.txt", j, 0664)
		}
		check(err)
		if mode {
			fmt.Print("Report Generated\n")
			fmt.Println("IP Report:")
		}
	} else {
		fmt.Println("Invalid URL")
		os.Exit(-1)
	}
}
// isUrlValid checks whether url is valid; only plain http: URLs are accepted.
func isUrlValid(url string) bool {
	return len(url) >= 5 && url[:5] == "http:" && govalidator.IsURL(url)
}
// Checks validity of URL
func parseURL(url string) error {
	if !govalidator.IsURL(url) || !strings.HasPrefix(url, "http") {
		return fmt.Errorf("URL %s is not in the format of http(s)://<ip>:<port>", url)
	}
	return nil
}
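// Note (not in the original source): the extra strings.HasPrefix check matters
// because govalidator.IsURL also accepts scheme-less values, so on its own it
// would let through inputs that later fail when dialed with an http client.
// A small illustration, assuming govalidator's default behavior:
//
//	govalidator.IsURL("10.0.0.1:9090")  // true, even though there is no http(s) scheme
//	parseURL("10.0.0.1:9090")           // returns an error thanks to the HasPrefix check
//	parseURL("http://10.0.0.1:9090")    // returns nil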
func authenticationHandler(res http.ResponseWriter, r *http.Request) {
	fn := "authenticationHandler"
	cors, app := ds.Guest(res, r)
	if !cors {
		return
	}
	ok, m := app.ReadRequestBody(fn, r)
	for ok {
		ok, session := app.ParseJWT(r)
		if !ok {
			break
		}
		user := &ds.UserStruct{}
		user.ID = session["id"]
		user.Iat = session["iat"]
		user.Exp = session["exp"]
		ok, _ = app.ParseTimestamp(user.Iat)
		if !ok {
			break
		}
		ok, _ = app.ParseTimestamp(user.Exp)
		if !ok {
			break
		}
		name := m["nickname"]
		photo := m["picture"]
		email := m["email"]
		if !govalidator.IsURL(photo) {
			app.Debug(fn, "INVALID PHOTO URL: "+photo)
		}
		if len(name) > 0 && len(photo) > 0 {
			ok, encoded := app.Base64CompressURL(fn, photo, 40)
			if ok {
				info := &ds.UserBasic{user.ID, name, photo, encoded, ""}
				ok, digest := app.DigestJSON(fn, info)
				if ok {
					info.Digest = digest
					if app.PutStruct(fn, user.ID, info) {
						app.Debug(fn, "UPDATED BASIC INFO")
					}
				}
			}
		} else {
			app.Debug(fn, "SKIPPING BASIC INFO UPDATE: "+name+"/"+photo)
		}
		if len(email) > 0 {
			if !app.PutStruct(fn, user.ID, &ds.EmailAddress{email}) {
				break
			}
		}
		if !app.PutStruct(fn, ds.SHA512([]byte(session["key"])), user) {
			break
		}
		app.Debug(fn, "GRANTED NEW SESSION TO USER: "+user.ID)
		/*
			ok, contacts := app. // (call redacted in the source)
			if !ok {
				break
			}
			ok, feed := app.GetFeeds(fn, "0", user.ID, user.ID)
			if !ok {
				break
			}
			ok, public := app.LookupPublicInfo(fn, user.ID, true)
			if !ok {
				break
			}
			var newToken = struct {
				PROFILE  interface{}
				CONTACTS interface{}
				FEED     interface{}
				APIKEY   string
			}{
				public,
				contacts,
				feed,
				session["key"],
			}
		*/
		var newToken = struct{ APIKEY string }{session["key"]}
		ok, _ = app.ServeJSON(res, fn, newToken)
		if !ok {
			break
		}
		// send info to channel
		ok, _, conversation := app.ReadContact(fn, user.ID, user.ID)
		if ok && len(conversation) > 0 {
			app.SendToChannel([]string{user.ID}, "updates", conversation, nil)
		}
		return
	}
	http.Error(res, "INVALID REQUEST", 500)
}