Example #1
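// DatabaseExportCSV exports every site of one database as a CSV attachment,
// translated using the requesting user's language isocode and named after the
// database plus the current timestamp.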
func DatabaseExportCSV(w http.ResponseWriter, r *http.Request, proute routes.Proute) {
	params := proute.Params.(*DatabaseInfosParams)
	tx, err := db.DB.Beginx()
	if err != nil {
		log.Println("can't start transaction")
		userSqlError(w, err)
		return
	}

	// Get the current user from the session; their language isocode drives
	// the translations used in the CSV export.

	_user, _ := proute.Session.Get("user")
	user := _user.(model.User)

	dbName := ""

	err = tx.Get(&dbName, "SELECT name FROM \"database\" WHERE id = $1", params.Id)

	if err != nil {
		log.Println("Unable to export database")
		userSqlError(w, err)
		tx.Rollback()
		return
	}

	var sites []int

	err = tx.Select(&sites, "SELECT id FROM site where database_id = $1", params.Id)
	if err != nil {
		log.Println("Unable to export database")
		userSqlError(w, err)
		tx.Rollback()
		return
	}

	csvContent, err := export.SitesAsCSV(sites, user.First_lang_isocode, false, tx)

	if err != nil {
		log.Println("Unable to export database")
		userSqlError(w, err)
		tx.Rollback()
		return
	}

	err = tx.Commit()
	if err != nil {
		log.Println("Unable to export database")
		userSqlError(w, err)
		return
	}
	// Name the export after the database plus a zero-padded, filename-safe
	// timestamp, and quote the filename in the Content-Disposition header.
	filename := dbName + "-" + time.Now().Format("2006-01-02_15-04-05") + ".csv"
	w.Header().Set("Content-Type", "text/csv")
	w.Header().Set("Content-Disposition", `attachment; filename="`+filename+`"`)
	w.Write([]byte(csvContent))
}
Example #2
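// mapSearch runs the site search built from the filters posted by the map UI
// (area, centroid, knowledge, occupation, full text, characs, chronologies)
// and writes the matching sites back as CSV (tocsv == true) or as JSON.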
func mapSearch(w http.ResponseWriter, r *http.Request, proute routes.Proute, tocsv bool) {
	// for measuring execution time
	start := time.Now()

	params := proute.Json.(*MapSearchParams)

	fmt.Println("params: ", params)

	tx, err := db.DB.Beginx()
	if err != nil {
		log.Println("can't start transaction")
		userSqlError(w, err)
		return
	}

	_user, _ := proute.Session.Get("user")
	user := _user.(model.User)

	filters := MapSqlQuery{}
	filters.Init()

	// custom hard coded / mandatory filters
	filters.AddTable(&MapSqlDefSite, "site", false)
	filters.AddTable(&MapSqlDefDatabase, "database", false)
	filters.AddFilter("database", `"database".published = true`)

	// add database filter
	filters.AddFilter("database", `"site".database_id IN (`+model.IntJoin(params.Database, true)+`)`)

	// Area filter
	fmt.Println(params.Area.Type, params.Area.Lat, params.Area.Lng, params.Area.Radius)
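	// "disc" and "custom" areas are radius searches around a point (geography
	// ST_DWithin, radius in meters); any other type is containment of the site
	// geometry inside the posted GeoJSON shape. The $$ markers are positional
	// placeholders that MapSqlQuery substitutes with the trailing arguments
	// when the final query is built.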
	if params.Area.Type == "disc" || params.Area.Type == "custom" {
		filters.AddFilter("site", `ST_DWithin("site".geom, Geography(ST_MakePoint($$, $$)), $$)`,
			params.Area.Lng, params.Area.Lat, params.Area.Radius)
	} else {
		filters.AddFilter("site", `ST_Within("site".geom::geometry, ST_SetSRID(ST_GeomFromGeoJSON($$),4326))`,
			params.Area.Geojson.Geometry)
	}

	// add centroid filter
	switch params.Others.Centroid {
	case "with":
		filters.AddFilter("site", `"site".centroid = true`)
	case "without":
		filters.AddFilter("site", `"site".centroid = false`)
	case "":
		// do not filter
	}

	// add knowledge filter
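	// Only the whitelisted knowledge types below make it into the quoted IN
	// list, so concatenating them directly into the SQL string is safe.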
	str := ""
	for _, knowledge := range params.Others.Knowledges {
		switch knowledge {
		case "literature", "surveyed", "dig", "not_documented", "prospected_aerial", "prospected_pedestrian":
			if str == "" {
				str += "'" + knowledge + "'"
			} else {
				str += ",'" + knowledge + "'"
			}
		}
	}
	if str != "" {
		filters.AddTable(&MapSqlDefSiteRange, `site_range`, false)
		filters.AddTable(&MapSqlDefSiteRangeCharac, `site_range__charac`, false)
		filters.AddFilter("site_range__charac", `"site_range__charac".knowledge_type IN (`+str+`)`)
	}

	// add occupation filter (assumes an Occupations field on params.Others
	// carrying the posted occupation values; same whitelist pattern as above)
	str = ""
	for _, occupation := range params.Others.Occupations {
		switch occupation {
		case "not_documented", "single", "continuous", "multiple":
			if str == "" {
				str += "'" + occupation + "'"
			} else {
				str += ",'" + occupation + "'"
			}
		}
	}
	if str != "" {
		filters.AddFilter("site", `"site".occupation IN (`+str+`)`)
	}

	// text filter
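	// Start from the always-false "1=0" and OR one ILIKE clause per selected
	// target; bibliography and comment live on the site_range__charac_tr
	// translation table, hence the extra joins.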
	if params.Others.TextSearch != "" {
		str = "1=0"
		args := []interface{}{}
		for _, textSearchIn := range params.Others.TextSearchIn {
			switch textSearchIn {
			case "site_name":
				args = append(args, "%"+params.Others.TextSearch+"%")
				str += ` OR "site".name ILIKE $$`
			case "city_name":
				args = append(args, "%"+params.Others.TextSearch+"%")
				str += ` OR "site".city_name ILIKE $$`
			case "bibliography":
				args = append(args, "%"+params.Others.TextSearch+"%")
				filters.AddTable(&MapSqlDefSiteRange, `site_range`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharac, `site_range__charac`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharacTr, `site_range__charac_tr`, false)
				str += ` OR "site_range__charac_tr".bibliography ILIKE $$`
			case "comment":
				args = append(args, "%"+params.Others.TextSearch+"%")
				filters.AddTable(&MapSqlDefSiteRange, `site_range`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharac, `site_range__charac`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharacTr, `site_range__charac_tr`, false)
				str += ` OR "site_range__charac_tr".comment ILIKE $$`
			}
		}
		if str != "1=0" {
			filters.AddFilter("site", str, args...)
		}
	}

	// characs filters
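	// Group the selected characs by their root charac id: plain includes,
	// includes flagged exceptional, and excludes each go into their own map.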
	includes := make(map[int][]int)
	excludes := make(map[int][]int)
	exceptionals := make(map[int][]int)

	for characid, sel := range params.Characs {
		if sel.Include && !sel.Exceptional {
			if _, ok := includes[sel.RootId]; !ok {
				includes[sel.RootId] = make([]int, 0)
			}
			includes[sel.RootId] = append(includes[sel.RootId], characid)
		} else if sel.Include && sel.Exceptional {
			if _, ok := exceptionals[sel.RootId]; !ok {
				exceptionals[sel.RootId] = make([]int, 0)
			}
			exceptionals[sel.RootId] = append(exceptionals[sel.RootId], characid)
		} else if !sel.Include {
			if _, ok := excludes[sel.RootId]; !ok {
				excludes[sel.RootId] = make([]int, 0)
			}
			excludes[sel.RootId] = append(excludes[sel.RootId], characid)
		}
	}

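	// "all": a site must match every selected charac root, so each root gets
	// its own site_range__charac alias; excluded roots get an x_-prefixed
	// alias (the true flag to AddTable presumably marks it as an exclusion
	// join). "at-least-one", the default, flattens all roots into single IN
	// lists.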
	if params.Others.CharacsLinked == "all" {
		for rootid, characids := range includes {
			tableas := "site_range__charac_" + strconv.Itoa(rootid)
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, tableas, false)
			filters.AddFilter(tableas, tableas+`.charac_id IN (`+model.IntJoin(characids, true)+`)`)
		}

		for rootid, characids := range exceptionals {
			tableas := "site_range__charac_" + strconv.Itoa(rootid)
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, tableas, false)

			q := "1=0"
			for _, characid := range characids {
				q += " OR " + tableas + ".charac_id = " + strconv.Itoa(characid) + " AND " + tableas + ".exceptional = true"
			}

			filters.AddFilter(tableas, q)
		}

		for rootid, characids := range excludes {
			tableas := "x_site_range__charac_" + strconv.Itoa(rootid)
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, tableas, true)
			filters.AddFilter(tableas, tableas+".charac_id IN ("+model.IntJoin(characids, true)+")")
		}

	} else if params.Others.CharacsLinked == "at-least-one" { // default
		s_includes := []int{}
		s_excludes := []int{}
		s_exceptionals := []int{}

		for _, characids := range includes {
			s_includes = append(s_includes, characids...)
		}
		for _, characids := range excludes {
			s_excludes = append(s_excludes, characids...)
		}
		for _, characids := range exceptionals {
			s_exceptionals = append(s_exceptionals, characids...)
		}

		if len(s_includes) > 0 {
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, "site_range__charac", false)
			filters.AddFilter("site_range__charac", `site_range__charac.charac_id IN (`+model.IntJoin(s_includes, true)+`)`)
		}

		if len(s_excludes) > 0 {
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, "x_site_range__charac", true)
			filters.AddFilter("x_site_range__charac", `x_site_range__charac.charac_id IN (`+model.IntJoin(s_includes, true)+`)`)
		}

		if len(s_exceptionals) > 0 {
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, "site_range__charac", false)
			q := "1=0"
			for _, characid := range s_exceptionals {
				q += " OR site_range__charac.charac_id = " + strconv.Itoa(characid) + " AND site_range__charac.exceptional = true"
			}
			filters.AddFilter("site_range__charac", q)
		}
	}

	/*
		if len(excludes) > 0 {
			filters.AddExclude("site_range__charac", "x_site_range__charac.charac_id IN ("+model.IntJoin(excludes, true)+")")
		}
	*/
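	// Chronology filters. The paired start_date1/start_date2 and
	// end_date1/end_date2 columns presumably bound the uncertainty on a
	// site's start and end dates; the "potentially" / "certainly" cases
	// below read them that way.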
	for _, chronology := range params.Chronologies {

		q := "1=1"

		var start_date_str = strconv.Itoa(chronology.StartDate)
		var end_date_str = strconv.Itoa(chronology.EndDate)

		var tblname string
		if chronology.ExistenceInsideInclude == "+" {
			tblname = "site"
		} else if chronology.ExistenceInsideInclude == "-" {
			tblname = "x_site"
		} else {
			log.Println("ExistenceInsideInclude is bad : ", chronology.ExistenceInsideInclude)
			_ = tx.Rollback()
			return
		}

		switch chronology.ExistenceInsideSureness {
		case "potentially":
			q += " AND " + tblname + ".start_date1 <= " + end_date_str + " AND " + tblname + ".end_date2 >= " + start_date_str
			if chronology.ExistenceInsidePart == "full" {
				q += " AND " + tblname + ".start_date1 >= " + start_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str
			}
		case "certainly":
			q += " AND " + tblname + ".start_date2 <= " + end_date_str + " AND " + tblname + ".end_date1 >= " + start_date_str
			if chronology.ExistenceInsidePart == "full" {
				q += " AND " + tblname + ".start_date2 >= " + start_date_str + " AND " + tblname + ".end_date1 <= " + end_date_str
			}
		case "potentially-only":
			q += " AND " + tblname + ".start_date1 <= " + end_date_str + " AND " + tblname + ".end_date2 >= " + start_date_str
			q += " AND " + tblname + ".start_date2 > " + end_date_str + " AND " + tblname + ".end_date1 < " + start_date_str

			if chronology.ExistenceInsidePart == "full" {
				q += " AND " + tblname + ".start_date1 >= " + start_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str
			}
		}

		switch chronology.ExistenceOutsideInclude {
		case "": // it can
			// do nothing
		case "+": // it must
			switch chronology.ExistenceOutsideSureness {
			case "potentially":
				q += " AND (" + tblname + ".start_date2 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "certainly":
				q += " AND (" + tblname + ".start_date1 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "potentially-only":
				q += " AND (" + tblname + ".start_date2 < " + start_date_str + " AND " + tblname + ".start_date1 >= " + start_date_str
				q += " OR " + tblname + ".end_date1 > " + end_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str + ")"
			}

		case "-": // it must not
			switch chronology.ExistenceOutsideSureness {
			case "potentially":
				q += " AND NOT (" + tblname + ".start_date2 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "certainly":
				q += " AND NOT (" + tblname + ".start_date1 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "potentially-only":
				q += " AND NOT (" + tblname + ".start_date2 < " + start_date_str + " AND " + tblname + ".start_date1 >= " + start_date_str
				q += " OR " + tblname + ".end_date1 > " + end_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str + ")"
			}
		}

		if q != "1=1" {
			if chronology.ExistenceInsideInclude == "+" {
				filters.AddFilter("site", q)
			} else if chronology.ExistenceInsideInclude == "-" {
				filters.AddTable(&MapSqlDefXSite, "x_site", true)
				filters.AddFilter("x_site", q)
			}
		}
	}

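	// Assemble the final SELECT of site ids from every table and filter
	// accumulated above.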
	q, q_args := filters.BuildQuery()
	fmt.Println("q: ", q, q_args)

	site_ids := []int{}
	err = tx.Select(&site_ids, q, q_args...)
	if err != nil {
		fmt.Println("query failed : ", err)
		userSqlError(w, err)
		_ = tx.Rollback()
		return
	}
	//fmt.Println("site_ids : ", site_ids)

	elapsed := time.Since(start)
	fmt.Printf("Search took %s\n", elapsed)

	res := ""
	if tocsv {
		fmt.Println("ICI")
		w.Header().Set("Content-Type", "text/csv")
		csvContent, err := export.SitesAsCSV(site_ids, user.First_lang_isocode, true, tx)
		if err != nil {
			log.Println("can't export query as csv")
			userSqlError(w, err)
			return
		}
		w.Header().Set("Content-Type", "text/csv")
		w.Header().Set("Content-Disposition", "attachment; filename=export.csv")
		w.Write([]byte(csvContent))
	} else {
		w.Header().Set("Content-Type", "application/json")
		res = mapGetSitesAsJson(site_ids, tx)
	}
	//mapDebug(site_ids, tx)

	err = tx.Commit()
	if err != nil {
		log.Println("can't commit")
		userSqlError(w, err)
		return
	}

	w.Write([]byte(res))
}