// StatusJob renders the status of the scrape job identified by the "id" URL
// parameter as JSON.
func (route *ScraperRoute) StatusJob(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	jobID := params.ByName("id")

	data := scraper.NewRedisScrapdata()
	resp, err := data.ScrapJob(jobID)
	if err != nil {
		HandleHttpErrors(w, err)
		return
	}

	Render().JSON(w, http.StatusOK, resp)
}
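
// Selector decodes a ScrapSelector from the request body and looks up the
// selector registered for the given URL and scrape type, returning 404 if
// the URL is not registered.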
func (route *ScraperRoute) Selector(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	var selector scraper.ScrapSelector
	err := RequestToJsonObject(r, &selector)
	if err != nil {
		HandleHttpErrors(w, err)
		return
	}

	rdata := scraper.NewRedisScrapdata()
	s, err := rdata.Selector(selector.Url, selector.Stype)
	if err != nil {
		if err == scraper.ErrSelectorNotFound {
			Render().JSON(w, http.StatusNotFound, "Url not register for scrapping")
			return
		}
		HandleHttpErrors(w, err)
		return
	}

	Render().JSON(w, http.StatusOK, s)
}
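
// Log renders the scraper activity log stored in Redis as JSON.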
func (route *ScraperRoute) Log(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	data := scraper.NewRedisScrapdata()
	resp := data.ScrapLog()
	Render().JSON(w, http.StatusOK, resp)
}