Code example #1
File: parser.go Project: uve/localization.expert
func validateDomain(base *url.URL, link *url.URL) error {
	if !link.IsAbs() {
		return fmt.Errorf("Validated domain error: url is not absolute: %s", link)
	}

	if base.Host == "" {
		return fmt.Errorf("Validated domain error: empty domain: %s", link)
	}
	/*
	   if (base.Host != link.Host) {
	       return fmt.Errorf("Validated domain error: different domain: %s", link)
	   }*/

	err := isDomainBlacklisted(link)
	if err != nil {
		return err
	}

	err = isExtenstionBlacklisted(link)
	if err != nil {
		return err
	}

	return nil
}
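The check above hinges on (*url.URL).IsAbs, which simply reports whether the parsed URL carries a scheme. As a minimal standalone sketch (independent of the project above), this is roughly how the method behaves for a few inputs:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	for _, raw := range []string{
		"https://example.com/page", // has a scheme, so IsAbs() is true
		"//example.com/page",       // protocol-relative: host but no scheme, IsAbs() is false
		"/docs/index.html",         // path only
	} {
		u, err := url.Parse(raw)
		if err != nil {
			fmt.Println("parse error:", err)
			continue
		}
		fmt.Printf("%-28s IsAbs=%v Host=%q\n", raw, u.IsAbs(), u.Host)
	}
}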
Code example #2
File: main.go Project: DevSpouk/crawler
func parseLinks(urlToGet *url.URL, content string) ([]string, error) {
	var (
		err       error
		links     []string = make([]string, 0)
		matches   [][]string
		findLinks = regexp.MustCompile("<a.*?href=\"(.*?)\"")
	) //var

	// Retrieve all anchor tag URLs from string
	matches = findLinks.FindAllStringSubmatch(content, -1)

	for _, val := range matches {
		var linkUrl *url.URL

		// Parse the anchor tag URL
		if linkUrl, err = url.Parse(val[1]); err != nil {
			return links, err
		} //if

		// If the URL is absolute, add it to the slice
		// If the URL is relative, build an absolute URL
		if linkUrl.IsAbs() {
			links = append(links, linkUrl.String())
		} else {
			links = append(links, urlToGet.Scheme+"://"+urlToGet.Host+linkUrl.String())
		} //else
	} //for

	return links, err
} //parseLinks
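One caveat with the concatenation in the else branch: a relative link without a leading slash (for example "page.html") or with "../" segments is not resolved against the base path, so the result can be malformed. A hedged alternative sketch using the standard library's (*url.URL).ResolveReference instead (this helper is illustrative only, not part of the DevSpouk/crawler code above):

// resolveLink is an illustrative helper, not taken from the project above.
func resolveLink(base *url.URL, href string) (string, error) {
	ref, err := url.Parse(href)
	if err != nil {
		return "", err
	}
	// ResolveReference applies RFC 3986 resolution, so relative paths,
	// "../" segments and protocol-relative links are all handled.
	return base.ResolveReference(ref).String(), nil
}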
Code example #3
File: main.go Project: DevSpouk/crawler
func parseImages(urlToGet *url.URL, content string) ([]string, error) {
	var (
		err        error
		imgs       []string
		matches    [][]string
		findImages = regexp.MustCompile("<img.*?src=\"(.*?)\"")
	) //var

	// Retrieve all image URLs from string
	matches = findImages.FindAllStringSubmatch(content, -1)

	for _, val := range matches {
		var imgUrl *url.URL

		// Parse the image URL
		if imgUrl, err = url.Parse(val[1]); err != nil {
			return imgs, err
		} //if

		// If the URL is absolute, add it to the slice
		// If the URL is relative, build an absolute URL
		if imgUrl.IsAbs() {
			imgs = append(imgs, imgUrl.String())
		} else {
			imgs = append(imgs, urlToGet.Scheme+"://"+urlToGet.Host+imgUrl.String())
		} //else
	} //for

	return imgs, err
} //parseImages
Code example #4
File: data.go Project: hairyhenderson/gomplate
// ParseSource -
func ParseSource(value string) (*Source, error) {
	var (
		alias  string
		srcURL *url.URL
	)
	parts := strings.SplitN(value, "=", 2)
	if len(parts) == 1 {
		f := parts[0]
		alias = strings.SplitN(value, ".", 2)[0]
		if path.Base(f) != f {
			err := fmt.Errorf("Invalid datasource (%s). Must provide an alias with files not in working directory.", value)
			return nil, err
		}
		srcURL = absURL(f)
	} else if len(parts) == 2 {
		alias = parts[0]
		var err error
		srcURL, err = url.Parse(parts[1])
		if err != nil {
			return nil, err
		}

		if !srcURL.IsAbs() {
			srcURL = absURL(parts[1])
		}
	}

	s := NewSource(alias, srcURL)
	return s, nil
}
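The IsAbs check is what lets ParseSource accept either a full URL or a bare file name. absURL and NewSource are gomplate helpers not shown here; as a rough standalone illustration of the fallback (assuming "net/url" and "path/filepath" are imported), a file name could be turned into an absolute file:// URL along these lines. fileURL is a hypothetical stand-in, not gomplate's implementation:

// fileURL is a hypothetical stand-in for gomplate's absURL helper.
func fileURL(name string) (*url.URL, error) {
	abs, err := filepath.Abs(name)
	if err != nil {
		return nil, err
	}
	// Use forward slashes so the path is valid inside a file:// URL on Windows too.
	return &url.URL{Scheme: "file", Path: filepath.ToSlash(abs)}, nil
}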
Code example #5
File: feed.go Project: urandom/readeef
func addFeeds(user content.User, fm *readeef.FeedManager, links []string) (resp responseError) {
	resp = newResponse()

	var err error
	errs := make([]addFeedError, 0, len(links))

	for _, link := range links {
		var u *url.URL
		if u, err = url.Parse(link); err != nil {
			resp.err = err
			errs = append(errs, addFeedError{Link: link, Error: "Error parsing link"})
			continue
		} else if !u.IsAbs() {
			resp.err = errors.New("Feed has no link")
			errs = append(errs, addFeedError{Link: link, Error: resp.err.Error()})
			continue
		} else {
			var f content.Feed
			if f, err = fm.AddFeedByLink(link); err != nil {
				resp.err = err
				errs = append(errs, addFeedError{Link: link, Error: "Error adding feed to the database"})
				continue
			}

			uf := user.AddFeed(f)
			if uf.HasErr() {
				resp.err = f.Err()
				errs = append(errs, addFeedError{Link: link, Title: f.Data().Title, Error: "Error adding feed to the database"})
				continue
			}

			tags := strings.SplitN(u.Fragment, ",", -1)
			if u.Fragment != "" && len(tags) > 0 {
				repo := uf.Repo()
				tf := repo.TaggedFeed(user)
				tf.Data(uf.Data())

				t := make([]content.Tag, len(tags))
				for i := range tags {
					t[i] = repo.Tag(user)
					t[i].Data(data.Tag{Value: data.TagValue(tags[i])})
				}

				tf.Tags(t)
				if tf.UpdateTags(); tf.HasErr() {
					resp.err = tf.Err()
					errs = append(errs, addFeedError{Link: link, Title: f.Data().Title, Error: "Error adding feed to the database"})
					continue
				}
			}
		}
	}

	resp.val["Errors"] = errs
	resp.val["Success"] = len(errs) < len(links)
	return
}
Code example #6
File: debug.go Project: simonz05/cw
func debugUrl(u *url.URL) {
	logger.Println("Host:", u.Host)
	logger.Println("Path:", u.Path)
	logger.Println("Request URI:", u.RequestURI())
	logger.Println("Scheme:", u.Scheme)
	logger.Println("Query:", u.RawQuery)
	logger.Println("Fragment:", u.Fragment)
	logger.Println("IsAbs:", u.IsAbs())
}
Code example #7
File: feed.go Project: RavenB/readeef
func addFeed(user content.User, fm *readeef.FeedManager, links []string) (resp responseError) {
	resp = newResponse()

	success := false

	for _, link := range links {
		var u *url.URL
		if u, resp.err = url.Parse(link); resp.err != nil {
			/* TODO: non-fatal error */
			return
		} else if !u.IsAbs() {
			/* TODO: non-fatal error */
			resp.err = errors.New("Feed has no link")
			return
		} else {
			var f content.Feed
			if f, resp.err = fm.AddFeedByLink(link); resp.err != nil {
				return
			}

			uf := user.AddFeed(f)
			if uf.HasErr() {
				resp.err = f.Err()
				return
			}

			tags := strings.SplitN(u.Fragment, ",", -1)
			if u.Fragment != "" && len(tags) > 0 {
				repo := uf.Repo()
				tf := repo.TaggedFeed(user)
				tf.Data(uf.Data())

				t := make([]content.Tag, len(tags))
				for i := range tags {
					t[i] = repo.Tag(user)
					t[i].Value(data.TagValue(tags[i]))
				}

				tf.Tags(t)
				if tf.UpdateTags(); tf.HasErr() {
					resp.err = tf.Err()
					return
				}
			}

			success = true
		}
	}

	resp.val["Success"] = success
	return
}
Code example #8
File: handlers_test.go Project: shihuacai1989/pushgo
func endpointIds(uri *url.URL) (deviceId, channelId string, ok bool) {
	if !uri.IsAbs() {
		ok = false
		return
	}
	pathPrefix := "/update/"
	i := strings.Index(uri.Path, pathPrefix)
	if i < 0 {
		ok = false
		return
	}
	key := strings.SplitN(uri.Path[i+len(pathPrefix):], ".", 2)
	if len(key) < 2 {
		ok = false
		return
	}
	return key[0], key[1], true
}
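A usage sketch for the function above (the host and IDs are made up; assumes "fmt" and "net/url" are imported): an absolute endpoint URL of the form .../update/<deviceId>.<channelId> is split into its two parts.

func ExampleEndpointIds() {
	u, _ := url.Parse("https://push.example.org/update/abc123.def456")
	deviceId, channelId, ok := endpointIds(u)
	fmt.Println(deviceId, channelId, ok)
	// Output: abc123 def456 true
}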
Code example #9
File: provider.go Project: Ablu/drone
// Combine a URL and Request to make the URL absolute
func makeURLAbs(url *url.URL, request *http.Request) {
	if !url.IsAbs() {
		url.Host = request.Host
		if request.TLS != nil || request.Header.Get("X-Forwarded-Proto") == "https" {
			url.Scheme = "https"
		} else {
			url.Scheme = "http"
		}
	}
}
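A hedged usage sketch (not from the drone project; assumes "fmt", "net/url" and "net/http/httptest" are imported): httptest.NewRequest fills in Host from the target and, for an https target, a dummy TLS state, so the relative URL is completed to an https one.

func demoMakeURLAbs() {
	req := httptest.NewRequest("GET", "https://ci.example.com/hook", nil)

	u, _ := url.Parse("/authorize/callback")
	makeURLAbs(u, req)
	fmt.Println(u.String()) // https://ci.example.com/authorize/callback
}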
Code example #10
File: oauth.go Project: jacobpgallagher/oauth
// Combine a URL and Request to make the URL absolute
func makeURLAbs(url *url.URL, request *http.Request) {
	if !url.IsAbs() {
		url.Host = request.Host
		if strings.HasPrefix(request.Proto, "HTTP/") {
			url.Scheme = "http"
		} else {
			url.Scheme = "https"
		}
	}
}
Code example #11
File: ip_range.go Project: pxie/loggregator
func IpOutsideOfRanges(testURL url.URL, ranges []IPRange) (bool, error) {
	if !testURL.IsAbs() {
		return false, errors.New(fmt.Sprintf("Missing protocol for url: %s", testURL))
	}
	host := strings.Split(testURL.Host, ":")[0]
	ipAddress := net.ParseIP(host)
	if ipAddress == nil {
		ipAddr, err := net.ResolveIPAddr("ip", host)
		if err != nil {
			return false, errors.New(fmt.Sprintf("Resolving host failed: %s", err))
		}
		ipAddress = net.ParseIP(ipAddr.String())
	}

	for _, ipRange := range ranges {
		if bytes.Compare(ipAddress, net.ParseIP(ipRange.Start)) >= 0 && bytes.Compare(ipAddress, net.ParseIP(ipRange.End)) <= 0 {
			return false, nil
		}
	}
	return true, nil
}
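A usage sketch, assuming IPRange is a struct with Start and End string fields, as the net.ParseIP calls above suggest (the address and range are made up; "fmt" and "net/url" imports assumed):

func demoIpOutsideOfRanges() {
	u, _ := url.Parse("https://10.0.1.20:8080/metrics")
	outside, err := IpOutsideOfRanges(*u, []IPRange{
		{Start: "10.0.0.0", End: "10.255.255.255"},
	})
	fmt.Println(outside, err) // false <nil> -- 10.0.1.20 falls inside the range
}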
Code example #12
File: gowsdl.go Project: danmux/gowsdl
// add in any schemas that have any includes
func (g *GoWSDL) resolveXSDExternals(schema *XSDSchema, url *url.URL) error {
	for _, incl := range schema.Includes {
		log.Println("resolving: ", incl.SchemaLocation)

		location, err := url.Parse(incl.SchemaLocation)
		if err != nil {
			return err
		}

		_, schemaName := filepath.Split(location.Path)
		log.Println("resolving schema name: ", schemaName)
		if g.resolvedXSDExternals[schemaName] { // memoisation
			continue
		}

		schemaLocation := location.String()
		if !location.IsAbs() {
			if !url.IsAbs() {
				return fmt.Errorf("Unable to resolve external schema %s through WSDL URL %s", schemaLocation, url)
			}
			schemaLocation = url.Scheme + "://" + url.Host + schemaLocation
		}

		log.Println("Downloading external schema", "location", schemaLocation)

		data, err := downloadFile(schemaLocation, g.ignoreTLS, g.localRoot)
		if err != nil {
			return err
		}
		newschema := new(XSDSchema)

		err = xml.Unmarshal(data, newschema)
		if err != nil {
			log.Println(err)
			return err
		}

		if len(newschema.Includes) > 0 &&
			maxRecursion > g.currentRecursionLevel {

			g.currentRecursionLevel++

			log.Printf("Entering recursion %d\n", g.currentRecursionLevel)
			g.resolveXSDExternals(newschema, url)
		}

		g.wsdl.Types.Schemas = append(g.wsdl.Types.Schemas, newschema)

		if g.resolvedXSDExternals == nil {
			g.resolvedXSDExternals = make(map[string]bool, maxRecursion)
		}
		g.resolvedXSDExternals[schemaName] = true
	}

	return nil
}
Code example #13
File: urls.go Project: ivan1993spb/ru-supplier
func generateURL(URL *url.URL, host string) *url.URL {
	if !URL.IsAbs() {
		return nil
	}
	if URL.Scheme != _URL_REQUIRED_SCHEME {
		return nil
	}
	if URL.Host != _URL_REQUIRED_HOST {
		if URL.Host != _URL_REQUIRED_ALIAS_HOST {
			return nil
		}
	}
	if path, ok := Paths[URL.Path]; ok {
		URL.Path = path
	} else {
		return nil
	}

	vals := URL.Query()
	if URL.Path == _URL_REQUIRED_QUICK_SEARCH_PATH {
		vals.Set("quickSearch", "true")
	} else {
		vals.Set("quickSearch", "false")
	}
	vals.Set("sortBy", _URL_REQUIRED_SORTING_TYPE)
	vals.Set("sortDirection", _URL_REQUIRED_SORTING_DIRECTION)
	vals.Set("userId", "null")
	vals.Set("conf", "true;true;true;true;true;true;true;true;"+
		"true;true;true;true;true;true;true;true;true;true;")
	URL.RawQuery = vals.Encode()

	return &url.URL{
		Scheme: "http",
		Host:   host,
		Path:   "/rss",
		RawQuery: url.Values{
			"url": {URL.String()},
		}.Encode(),
	}
}
Code example #14
func (g *GoWsdl) resolveXsdExternals(schema *XsdSchema, url *url.URL) error {
	for _, incl := range schema.Includes {
		location, err := url.Parse(incl.SchemaLocation)
		if err != nil {
			return err
		}

		_, schemaName := filepath.Split(location.Path)
		if g.resolvedXsdExternals[schemaName] {
			continue
		}

		schemaLocation := location.String()
		if !location.IsAbs() {
			if !url.IsAbs() {
				return errors.New(fmt.Sprintf("Unable to resolve external schema %s through WSDL URL %s", schemaLocation, url))
			}
			schemaLocation = url.Scheme + "://" + url.Host + schemaLocation
		}

		Log.Info("Downloading external schema", "location", schemaLocation)

		data, err := downloadFile(schemaLocation, g.ignoreTls)
		if err != nil {
			return err
		}
		newschema := &XsdSchema{}

		err = xml.Unmarshal(data, newschema)
		if err != nil {
			return err
		}

		if len(newschema.Includes) > 0 &&
			maxRecursion > g.currentRecursionLevel {

			g.currentRecursionLevel++

			//log.Printf("Entering recursion %d\n", g.currentRecursionLevel)
			g.resolveXsdExternals(newschema, url)
		}

		g.wsdl.Types.Schemas = append(g.wsdl.Types.Schemas, newschema)

		if g.resolvedXsdExternals == nil {
			g.resolvedXsdExternals = make(map[string]bool, maxRecursion)
		}
		g.resolvedXsdExternals[schemaName] = true
	}

	return nil
}
Code example #15
File: url.go Project: telemetryapp/goluago
func urlIsAbs(u *url.URL) lua.Function {
	return func(l *lua.State) int {
		l.PushBoolean(u.IsAbs())
		return 1
	}
}
Code example #16
File: feed.go Project: yourchanges/readeef
func (con Feed) Handler(c context.Context) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var err error

		db := readeef.GetDB(c)
		user := readeef.GetUser(c, r)

		action := webfw.GetMultiPatternIdentifier(c, r)
		params := webfw.GetParams(c, r)
		resp := make(map[string]interface{})

	SWITCH:
		switch action {
		case "list":
			var feeds []readeef.Feed
			feeds, err = db.GetUserTagsFeeds(user)

			if err != nil {
				break
			}

			respFeeds := []feed{}

			for _, f := range feeds {
				respFeeds = append(respFeeds, feed{
					Id: f.Id, Title: f.Title, Description: f.Description,
					Link: f.Link, Image: f.Image, Tags: f.Tags,
					UpdateError: f.UpdateError, SubscribeError: f.SubscribeError,
				})
			}

			resp["Feeds"] = respFeeds
		case "discover":
			r.ParseForm()

			link := r.FormValue("url")
			var u *url.URL

			/* TODO: non-fatal error */
			if u, err = url.Parse(link); err != nil {
				break
				/* TODO: non-fatal error */
			} else if !u.IsAbs() {
				u.Scheme = "http"
				if u.Host == "" {
					parts := strings.SplitN(u.Path, "/", 2)
					u.Host = parts[0]
					if len(parts) > 1 {
						u.Path = "/" + parts[1]
					} else {
						u.Path = ""
					}
				}
				link = u.String()
			}

			var feeds []readeef.Feed
			feeds, err = con.fm.DiscoverFeeds(link)
			if err != nil {
				break
			}

			var userFeeds []readeef.Feed
			userFeeds, err = db.GetUserFeeds(user)
			if err != nil {
				break
			}

			userFeedIdMap := make(map[int64]bool)
			userFeedLinkMap := make(map[string]bool)
			for _, f := range userFeeds {
				userFeedIdMap[f.Id] = true
				userFeedLinkMap[f.Link] = true

				u, err := url.Parse(f.Link)
				if err == nil && strings.HasPrefix(u.Host, "www.") {
					u.Host = u.Host[4:]
					userFeedLinkMap[u.String()] = true
				}
			}

			respFeeds := []feed{}
			for _, f := range feeds {
				if !userFeedIdMap[f.Id] && !userFeedLinkMap[f.Link] {
					respFeeds = append(respFeeds, feed{
						Id: f.Id, Title: f.Title, Description: f.Description,
						Link: f.Link, Image: f.Image,
					})
				}
			}

			resp["Feeds"] = respFeeds
		case "opml":
			buf := util.BufferPool.GetBuffer()
			defer util.BufferPool.Put(buf)

			buf.ReadFrom(r.Body)

			var opml parser.Opml
			opml, err = parser.ParseOpml(buf.Bytes())
			if err != nil {
				break
			}

			var userFeeds []readeef.Feed
			userFeeds, err = db.GetUserFeeds(user)
			if err != nil {
				break
			}

			userFeedMap := make(map[int64]bool)
			for _, f := range userFeeds {
				userFeedMap[f.Id] = true
			}

			var feeds []readeef.Feed
			for _, opmlFeed := range opml.Feeds {
				var discovered []readeef.Feed

				discovered, err = con.fm.DiscoverFeeds(opmlFeed.Url)
				if err != nil {
					continue
				}

				for _, f := range discovered {
					if !userFeedMap[f.Id] {
						if len(opmlFeed.Tags) > 0 {
							f.Link += "#" + strings.Join(opmlFeed.Tags, ",")
						}

						feeds = append(feeds, f)
					}
				}
			}

			respFeeds := []feed{}
			for _, f := range feeds {
				respFeeds = append(respFeeds, feed{
					Id: f.Id, Title: f.Title, Description: f.Description,
					Link: f.Link, Image: f.Image,
				})
			}
			resp["Feeds"] = respFeeds
		case "add":
			r.ParseForm()
			links := r.Form["url"]
			success := false

			for _, link := range links {
				/* TODO: non-fatal error */
				var u *url.URL
				if u, err = url.Parse(link); err != nil {
					break SWITCH
					/* TODO: non-fatal error */
				} else if !u.IsAbs() {
					err = errors.New("Feed has no link")
					break SWITCH
				}

				var f readeef.Feed
				f, err = con.fm.AddFeedByLink(link)
				if err != nil {
					break SWITCH
				}

				f, err = db.CreateUserFeed(readeef.GetUser(c, r), f)
				if err != nil {
					break SWITCH
				}

				tags := strings.SplitN(u.Fragment, ",", -1)
				if u.Fragment != "" && len(tags) > 0 {
					err = db.CreateUserFeedTag(f, tags...)
				}

				success = true
			}

			resp["Success"] = success
		case "remove":
			var id int64
			id, err = strconv.ParseInt(params["feed-id"], 10, 64)

			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			var feed readeef.Feed
			feed, err = db.GetUserFeed(id, user)
			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			err = db.DeleteUserFeed(feed)
			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			con.fm.RemoveFeed(feed)

			resp["Success"] = true
		case "tags":
			var id int64
			id, err = strconv.ParseInt(params["feed-id"], 10, 64)

			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			var feed readeef.Feed
			feed, err = db.GetUserFeed(id, user)
			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			if r.Method == "GET" {
				resp["Tags"] = feed.Tags
			} else if r.Method == "POST" {
				var tags []string

				tags, err = db.GetUserFeedTags(user, feed)
				if err != nil {
					break
				}

				err = db.DeleteUserFeedTag(feed, tags...)
				if err != nil {
					break
				}

				decoder := json.NewDecoder(r.Body)

				tags = []string{}
				err = decoder.Decode(&tags)
				if err != nil {
					break
				}

				err = db.CreateUserFeedTag(feed, tags...)
				if err != nil {
					break
				}

				resp["Success"] = true
				resp["Id"] = feed.Id
			}
		case "read":
			feedId := params["feed-id"]
			timestamp := params["timestamp"]

			var seconds int64
			seconds, err = strconv.ParseInt(timestamp, 10, 64)
			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			t := time.Unix(seconds/1000, 0)

			switch {
			case feedId == "tag:__all__":
				err = db.MarkUserArticlesByDateAsRead(user, t, true)
			case feedId == "__favorite__":
				// Favorites are assumed to have been read already
			case strings.HasPrefix(feedId, "tag:"):
				tag := feedId[4:]
				err = db.MarkUserTagArticlesByDateAsRead(user, tag, t, true)
			default:
				var id int64

				id, err = strconv.ParseInt(feedId, 10, 64)
				/* TODO: non-fatal error */
				if err != nil {
					break SWITCH
				}

				var feed readeef.Feed
				feed, err = db.GetUserFeed(id, user)
				/* TODO: non-fatal error */
				if err != nil {
					break SWITCH
				}

				err = db.MarkFeedArticlesByDateAsRead(feed, t, true)
			}

			if err == nil {
				resp["Success"] = true
			}
		case "articles":
			var articles []readeef.Article

			var limit, offset int

			feedId := params["feed-id"]

			limit, err = strconv.Atoi(params["limit"])
			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			offset, err = strconv.Atoi(params["offset"])
			/* TODO: non-fatal error */
			if err != nil {
				break
			}

			newerFirst := params["newer-first"] == "true"
			unreadOnly := params["unread-only"] == "true"

			if limit > 50 {
				limit = 50
			}

			if feedId == "__favorite__" {
				if newerFirst {
					articles, err = db.GetUserFavoriteArticlesDesc(user, limit, offset)
				} else {
					articles, err = db.GetUserFavoriteArticles(user, limit, offset)
				}
				if err != nil {
					break
				}
			} else if feedId == "tag:__all__" {
				if newerFirst {
					if unreadOnly {
						articles, err = db.GetUnreadUserArticlesDesc(user, limit, offset)
					} else {
						articles, err = db.GetUserArticlesDesc(user, limit, offset)
					}
				} else {
					if unreadOnly {
						articles, err = db.GetUnreadUserArticles(user, limit, offset)
					} else {
						articles, err = db.GetUserArticles(user, limit, offset)
					}
				}
				if err != nil {
					break
				}
			} else if strings.HasPrefix(feedId, "tag:") {
				tag := feedId[4:]
				if newerFirst {
					if unreadOnly {
						articles, err = db.GetUnreadUserTagArticlesDesc(user, tag, limit, offset)
					} else {
						articles, err = db.GetUserTagArticlesDesc(user, tag, limit, offset)
					}
				} else {
					if unreadOnly {
						articles, err = db.GetUnreadUserTagArticles(user, tag, limit, offset)
					} else {
						articles, err = db.GetUserTagArticles(user, tag, limit, offset)
					}
				}
				if err != nil {
					break
				}
			} else {
				var f readeef.Feed

				var id int64
				id, err = strconv.ParseInt(feedId, 10, 64)

				if err != nil {
					err = errors.New("Unknown feed id " + feedId)
					break
				}

				f, err = db.GetFeed(id)
				/* TODO: non-fatal error */
				if err != nil {
					break
				}

				f.User = user

				if newerFirst {
					if unreadOnly {
						f, err = db.GetUnreadFeedArticlesDesc(f, limit, offset)
					} else {
						f, err = db.GetFeedArticlesDesc(f, limit, offset)
					}
				} else {
					if unreadOnly {
						f, err = db.GetUnreadFeedArticles(f, limit, offset)
					} else {
						f, err = db.GetFeedArticles(f, limit, offset)
					}
				}
				if err != nil {
					break
				}

				articles = f.Articles
			}

			resp["Articles"] = articles
		}

		var b []byte
		if err == nil {
			b, err = json.Marshal(resp)
		}

		if err == nil {
			w.Write(b)
		} else {
			webfw.GetLogger(c).Print(err)

			w.WriteHeader(http.StatusInternalServerError)
		}

	}
}
Code example #17
File: urlutil.go Project: rainycape/gondola
// isAbs returns true if the URL is absolute.
// This function considers protocol-relative
// URLs to be absolute
func isAbs(u *url.URL) bool {
	return u.IsAbs() || strings.HasPrefix(u.String(), "//")
}
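A quick standalone illustration of how this differs from the plain method (not from the gondola code; assumes "fmt" and "net/url" are imported):

func demoIsAbs() {
	u, _ := url.Parse("//cdn.example.com/app.js")
	fmt.Println(u.IsAbs()) // false -- no scheme
	fmt.Println(isAbs(u))  // true  -- protocol-relative URLs count as absolute here
}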
Code example #18
func IsAbsoluteURL(url *url.URL) bool {
	return url.IsAbs()
}