Example 1
// getWmLayers returns the map layers matching params, with their
// translations; unpublished layers are included only when viewUnpublished
// is true.
func getWmLayers(params *LayersParams, viewUnpublished bool, tx *sqlx.Tx) (layers []*model.LayerFullInfos, err error) {

	layers = []*model.LayerFullInfos{}

	q := "SELECT m.id, m.type, m.start_date, m.end_date, m.min_scale, m.max_scale, ST_AsGeoJSON(m.geographical_extent_geom) as geographical_extent_geom, m.published, m.created_at, m.creator_user_id, u.firstname || ' ' || u.lastname as author FROM map_layer m LEFT JOIN \"user\" u ON m.creator_user_id = u.id WHERE m.id > 0"

	if params.Author > 0 {
		q += " AND u.id = :author"
	}

	if params.Published || !viewUnpublished {
		q += " AND m.published = 't'"
	}

	if params.Type != "" {
		q += " AND m.type= :type"
	}

	if params.Bounding_box != "" {
		q += " AND (ST_Contains(ST_GeomFromGeoJSON(:bounding_box), m.geographical_extent_geom::::geometry) OR ST_Contains(m.geographical_extent_geom::::geometry, ST_GeomFromGeoJSON(:bounding_box)) OR ST_Overlaps(ST_GeomFromGeoJSON(:bounding_box), m.geographical_extent_geom::::geometry))"
	}

	if params.Check_dates {
		q += " AND m.start_date >= :start_date AND m.end_date <= :end_date"
	}

	in := model.IntJoin(params.Ids, false)

	if in != "" {
		q += " AND m.id IN (" + in + ")"
	}

	nstmt, err := tx.PrepareNamed(q)
	if err != nil {
		log.Println(err)
		_ = tx.Rollback()
		return
	}
	err = nstmt.Select(&layers, params)
	if err != nil {
		log.Println(err)
		_ = tx.Rollback()
		return
	}

	for _, layer := range layers {

		tr := []model.Map_layer_tr{}
		err = tx.Select(&tr, "SELECT * FROM map_layer_tr WHERE map_layer_id = $1", layer.Id)
		if err != nil {
			log.Println(err)
			_ = tx.Rollback()
			return
		}
		layer.Uniq_code = layer.Type + strconv.Itoa(layer.Id)
		layer.Name = model.MapSqlTranslations(tr, "Lang_isocode", "Name")
		layer.Attribution = model.MapSqlTranslations(tr, "Lang_isocode", "Attribution")
		layer.Copyright = model.MapSqlTranslations(tr, "Lang_isocode", "Copyright")
		layer.Description = model.MapSqlTranslations(tr, "Lang_isocode", "Description")
	}

	return
}
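
A minimal caller sketch (an assumption, not code from the project): it opens a transaction on a hypothetical db *sqlx.DB handle, fetches the published layers and prints their codes. Note the quadrupled colons in the bounding-box filter above: sqlx's named-query parser treats "::" as an escaped literal ":", so "::::geometry" reaches PostgreSQL as the cast "::geometry".

// listPublishedLayers is a hypothetical usage example for getWmLayers.
// Every identifier not defined in this file (the db handle, field names)
// is assumed; adjust to the real project.
func listPublishedLayers(db *sqlx.DB) error {
	tx, err := db.Beginx()
	if err != nil {
		return err
	}
	layers, err := getWmLayers(&LayersParams{Published: true}, false, tx)
	if err != nil {
		return err // getWmLayers already rolled back the transaction
	}
	if err := tx.Commit(); err != nil {
		return err
	}
	for _, l := range layers {
		fmt.Println(l.Uniq_code)
	}
	return nil
}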
Example 2
// mapGetSitesAsJson builds a GeoJSON FeatureCollection string for the given
// site ids, aggregating site infos and site ranges directly in SQL.
func mapGetSitesAsJson(sites []int, tx *sqlx.Tx) string {

	// for measuring execution time
	start := time.Now()

	var jsonResult []string

	q := `SELECT '{"type": "Feature", ' ||`
	q += `'"geometry": {"type": "Point", "coordinates": [' || (`
	q += `	SELECT ST_X(geom::geometry) || ', ' || ST_Y(geom::geometry) AS coordinates FROM site WHERE id = s.id`
	q += `) || ']}, ' ||`
	q += `'"properties": {"infos": ' || (`
	q += `	SELECT row_to_json(site_infos) || ',' || `
	q += `	'"site_ranges": ' || (`
	q += `		SELECT  array_to_json(array_agg(row_to_json(q_src))) FROM (`
	q += `			SELECT start_date1, start_date2, end_date1, end_date2, `
	q += `			(`
	q += `				SELECT array_to_json(array_agg(row_to_json(q_src2))) FROM (`
	q += `					SELECT src.* FROM site_range__charac src WHERE src.site_range_id IN (SELECT site_range_id FROM site_range__charac WHERE site_range_id = sr.id)`
	q += `				) q_src2`
	q += `			) characs`
	q += `	   	FROM site_range sr WHERE sr.site_id = s.id) q_src`
	q += `	)`
	q += `	 FROM (SELECT si.id, si.code, si.name, si.centroid, si.occupation, si.start_date1, si.start_date2, si.end_date1, si.end_date2, d.id AS database_id, d.name as database_name FROM site si LEFT JOIN database d ON si.database_id = d.id WHERE si.id = s.id) site_infos`
	q += `)`
	q += `|| '}}'`
	q += ` FROM site s WHERE s.id IN (` + model.IntJoin(sites, true) + `)`

	err := tx.Select(&jsonResult, q)

	elapsed := time.Since(start)
	fmt.Printf("mapGetSitesAsJson took %s", elapsed)

	if err != nil {
		fmt.Println(err.Error())
	}

	jsonString := `{"type": "FeatureCollection", "features": [` + strings.Join(jsonResult, ",") + `]}`

	return jsonString
}
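
A hypothetical HTTP handler sketch showing how the resulting FeatureCollection string could be served; the package-level db handle and the hard-coded site ids are assumptions for illustration.

// serveSites returns the GeoJSON FeatureCollection for a fixed id list.
func serveSites(w http.ResponseWriter, r *http.Request) {
	tx, err := db.Beginx()
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	geojson := mapGetSitesAsJson([]int{1, 2, 3}, tx)
	if err := tx.Commit(); err != nil { // read-only, but release the tx
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	w.Write([]byte(geojson))
}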
Example 3
// setCharacRecursive saves a charac and, recursively, its sub-characs,
// recreating their translations and pruning children that no longer exist.
func setCharacRecursive(tx *sqlx.Tx, charac *CharacTreeStruct, parent *CharacTreeStruct) error {
	var err error

	// if we are the root, we have no parent id
	if parent != nil {
		charac.Parent_id = parent.Id
	} else {
		charac.Parent_id = 0
	}

	// save charac...
	if charac.Id > 0 {
		err = charac.Update(tx)
		if err != nil {
			return err
		}
	} else {
		err = charac.Create(tx)
		if err != nil {
			return err
		}
	}

	//log.Println("c: ", charac)

	// delete any translations
	_, err = tx.Exec("DELETE FROM charac_tr WHERE charac_id = $1", charac.Id)
	if err != nil {
		return err
	}

	// create a map of translations for name...
	tr := map[string]*model.Charac_tr{}
	for isocode, name := range charac.Name {
		tr[isocode] = &model.Charac_tr{
			Charac_id:    charac.Id,
			Lang_isocode: isocode,
			Name:         name,
		}
	}

	// continue to update this map with descriptions...
	for isocode, description := range charac.Description {
		m, ok := tr[isocode]
		if ok {
			m.Description = description
		} else {
			tr[isocode] = &model.Charac_tr{
				Charac_id:    charac.Id,
				Lang_isocode: isocode,
				Description:  description,
			}
		}
	}

	// now insert translations rows in database...
	for _, m := range tr {
		err = m.Create(tx)
		if err != nil {
			return err
		}
	}

	// recursively call to subcontents...
	ids := []int{} // collect ids so we can delete characs of this sub-level that no longer exist
	for i := range charac.Content {
		sub := &charac.Content[i] // pointer into the slice, so ids set by Create() persist in the tree
		err = setCharacRecursive(tx, sub, charac)
		if err != nil {
			return err
		}
		ids = append(ids, sub.Charac.Id)
	}

	// search any charac that should be deleted
	ids_to_delete := []int{} // ids of the characs to delete
	err = tx.Select(&ids_to_delete, "SELECT id FROM charac WHERE id NOT IN ("+model.IntJoin(ids, true)+") AND parent_id = "+strconv.Itoa(charac.Charac.Id))
	if err != nil {
		return err
	}

	// delete translations of the characs that should be deleted
	_, err = tx.Exec("DELETE FROM charac_tr WHERE charac_id IN (" + model.IntJoin(ids_to_delete, true) + ")")
	if err != nil {
		return err
	}

	// delete the characs themselves...
	_, err = tx.Exec("DELETE FROM charac WHERE id IN (" + model.IntJoin(ids_to_delete, true) + ")")
	if err != nil {
		return err
	}

	return err
}
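
A usage sketch, again with assumed names: the root of the tree is passed with a nil parent, which (per the code above) forces its Parent_id to 0.

// saveCharacTree persists a whole charac tree inside one transaction.
func saveCharacTree(db *sqlx.DB, root *CharacTreeStruct) error {
	tx, err := db.Beginx()
	if err != nil {
		return err
	}
	if err := setCharacRecursive(tx, root, nil); err != nil {
		_ = tx.Rollback()
		return err
	}
	return tx.Commit()
}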
Example 4
// SitesAsCSV exports the given sites as a CSV file; when includeDbName is
// true a DATABASE_NAME column is prepended.
func SitesAsCSV(siteIDs []int, isoCode string, includeDbName bool, tx *sqlx.Tx) (outp string, err error) {

	var buff bytes.Buffer

	w := csv.NewWriter(&buff)
	w.Comma = ';'
	w.UseCRLF = true

	if includeDbName {
		err = w.Write([]string{"DATABASE_NAME", "SITE_SOURCE_ID", "SITE_NAME", "MAIN_CITY_NAME", "GEONAME_ID", "PROJECTION_SYSTEM", "LONGITUDE", "LATITUDE", "ALTITUDE", "CITY_CENTROID", "STATE_OF_KNOWLEDGE", "OCCUPATION", "STARTING_PERIOD", "ENDING_PERIOD", "CARAC_NAME", "CARAC_LVL1", "CARAC_LVL2", "CARAC_LVL3", "CARAC_LVL4", "CARAC_EXP", "BIBLIOGRAPHY", "COMMENTS"})
	} else {
		err = w.Write([]string{"SITE_SOURCE_ID", "SITE_NAME", "MAIN_CITY_NAME", "GEONAME_ID", "PROJECTION_SYSTEM", "LONGITUDE", "LATITUDE", "ALTITUDE", "CITY_CENTROID", "STATE_OF_KNOWLEDGE", "OCCUPATION", "STARTING_PERIOD", "ENDING_PERIOD", "CARAC_NAME", "CARAC_LVL1", "CARAC_LVL2", "CARAC_LVL3", "CARAC_LVL4", "CARAC_EXP", "BIBLIOGRAPHY", "COMMENTS"})
	}
	if err != nil {
		log.Println("database::ExportCSV : ", err.Error())
	}
	w.Flush()

	// Cache characs
	characs := make(map[int]string)

	q := "WITH RECURSIVE nodes_cte(id, path) AS (SELECT ca.id, cat.name::TEXT AS path FROM charac AS ca LEFT JOIN charac_tr cat ON ca.id = cat.charac_id LEFT JOIN lang ON cat.lang_isocode = lang.isocode WHERE lang.isocode = $1 AND ca.parent_id = 0 UNION ALL SELECT ca.id, (p.path || ';' || cat.name) FROM nodes_cte AS p, charac AS ca LEFT JOIN charac_tr cat ON ca.id = cat.charac_id LEFT JOIN lang ON cat.lang_isocode = lang.isocode WHERE lang.isocode = $1 AND ca.parent_id = p.id) SELECT * FROM nodes_cte AS n ORDER BY n.id ASC"

	rows, err := tx.Query(q, isoCode)
	switch {
	case err == sql.ErrNoRows:
		return outp, nil
	case err != nil:
		return
	}
	for rows.Next() {
		var id int
		var path string
		if err = rows.Scan(&id, &path); err != nil {
			rows.Close()
			return
		}
		characs[id] = path
	}
	rows.Close()

	q = "SELECT db.name as dbname, s.code, s.name, s.city_name, s.city_geonameid, ST_X(s.geom::geometry) as longitude, ST_Y(s.geom::geometry) as latitude, ST_X(s.geom_3d::geometry) as longitude_3d, ST_Y(s.geom_3d::geometry) as latitude3d, ST_Z(s.geom_3d::geometry) as altitude, s.centroid, s.occupation, sr.start_date1, sr.start_date2, sr.end_date1, sr.end_date2, src.exceptional, src.knowledge_type, srctr.bibliography, srctr.comment, c.id as charac_id FROM site s LEFT JOIN database db ON s.database_id = db.id LEFT JOIN site_range sr ON s.id = sr.site_id LEFT JOIN site_tr str ON s.id = str.site_id LEFT JOIN site_range__charac src ON sr.id = src.site_range_id LEFT JOIN site_range__charac_tr srctr ON src.id = srctr.site_range__charac_id LEFT JOIN charac c ON src.charac_id = c.id WHERE s.id in (" + model.IntJoin(siteIDs, true) + ") AND str.lang_isocode IS NULL OR str.lang_isocode = db.default_language ORDER BY s.id, sr.id"

	rows2, err := tx.Query(q)
	if err != nil {
		return
	}
	for rows2.Next() {
		var (
			dbname         string
			code           string
			name           string
			city_name      string
			city_geonameid int
			longitude      float64
			latitude       float64
			longitude3d    float64
			latitude3d     float64
			altitude3d     float64
			centroid       bool
			occupation     string
			start_date1    int
			start_date2    int
			end_date1      int
			end_date2      int
			knowledge_type string
			exceptional    bool
			bibliography   string
			comment        string
			charac_id      int
			slongitude     string
			slatitude      string
			saltitude      string
			scentroid      string
			soccupation    string
			scharacs       string
			scharac_name   string
			scharac_lvl1   string
			scharac_lvl2   string
			scharac_lvl3   string
			scharac_lvl4   string
			sexceptional   string
			// description    string
		)
		if err = rows2.Scan(&dbname, &code, &name, &city_name, &city_geonameid, &longitude, &latitude, &longitude3d, &latitude3d, &altitude3d, &centroid, &occupation, &start_date1, &start_date2, &end_date1, &end_date2, &exceptional, &knowledge_type, &bibliography, &comment, &charac_id); err != nil {
			log.Println(err)
			rows2.Close()
			return
		}
		// Geonameid
		var cgeonameid string
		if city_geonameid != 0 {
			cgeonameid = strconv.Itoa(city_geonameid)
		}
		// Longitude
		slongitude = strconv.FormatFloat(longitude, 'f', -1, 32)
		// Latitude
		slatitude = strconv.FormatFloat(latitude, 'f', -1, 32)
		// Altitude
		if longitude3d == 0 && latitude3d == 0 && altitude3d == 0 {
			saltitude = ""
		} else {
			saltitude = strconv.FormatFloat(altitude3d, 'f', -1, 32)
		}
		// Centroid
		if centroid {
			scentroid = translate.T(isoCode, "IMPORT.CSVFIELD_ALL.T_LABEL_YES")
		} else {
			scentroid = translate.T(isoCode, "IMPORT.CSVFIELD_ALL.T_LABEL_NO")
		}
		// Occupation
		switch occupation {
		case "not_documented":
			soccupation = translate.T(isoCode, "IMPORT.CSVFIELD_OCCUPATION.T_LABEL_NOT_DOCUMENTED")
		case "single":
			soccupation = translate.T(isoCode, "IMPORT.CSVFIELD_OCCUPATION.T_LABEL_SINGLE")
		case "continuous":
			soccupation = translate.T(isoCode, "IMPORT.CSVFIELD_OCCUPATION.T_LABEL_CONTINUOUS")
		case "multiple":
			soccupation = translate.T(isoCode, "IMPORT.CSVFIELD_OCCUPATION.T_LABEL_MULTIPLE")
		}
		// State of knowledge
		switch knowledge_type {
		case "not_documented":
			knowledge_type = translate.T(isoCode, "IMPORT.CSVFIELD_STATE_OF_KNOWLEDGE.T_LABEL_NOT_DOCUMENTED")
		case "literature":
			knowledge_type = translate.T(isoCode, "IMPORT.CSVFIELD_STATE_OF_KNOWLEDGE.T_LABEL_LITERATURE")
		case "prospected_aerial":
			knowledge_type = translate.T(isoCode, "IMPORT.CSVFIELD_STATE_OF_KNOWLEDGE.T_LABEL_PROSPECTED_AERIAL")
		case "prospected_pedestrian":
			knowledge_type = translate.T(isoCode, "IMPORT.CSVFIELD_STATE_OF_KNOWLEDGE.T_LABEL_PROSPECTED_PEDESTRIAN")
		case "surveyed":
			knowledge_type = translate.T(isoCode, "IMPORT.CSVFIELD_STATE_OF_KNOWLEDGE.T_LABEL_SURVEYED")
		case "dig":
			knowledge_type = translate.T(isoCode, "IMPORT.CSVFIELD_STATE_OF_KNOWLEDGE.T_LABEL_DIG")
		}
		// Revert hack on dates
		if start_date1 < 1 && start_date1 > math.MinInt32 {
			start_date1--
		}
		if start_date2 < 1 && start_date2 > math.MinInt32 {
			start_date2--
		}
		if end_date1 < 1 && end_date1 > math.MinInt32 {
			end_date1--
		}
		if end_date2 < 1 && end_date2 > math.MinInt32 {
			end_date2--
		}
		// Starting period
		startingPeriod := ""
		if start_date1 != math.MinInt32 {
			startingPeriod += strconv.Itoa(start_date1)
		}
		if start_date1 != math.MinInt32 && start_date2 != math.MaxInt32 && start_date1 != start_date2 {
			startingPeriod += ":"
		}
		if start_date2 != math.MaxInt32 && start_date1 != start_date2 {
			startingPeriod += strconv.Itoa(start_date2)
		}
		if startingPeriod == "" {
			startingPeriod = translate.T(isoCode, "IMPORT.CSVFIELD_ALL.T_CHECK_UNDETERMINED")
		}
		// Ending period
		endingPeriod := ""
		if end_date1 != math.MinInt32 {
			endingPeriod += strconv.Itoa(end_date1)
		}
		if end_date1 != math.MinInt32 && end_date2 != math.MaxInt32 && end_date1 != end_date2 {
			endingPeriod += ":"
		}
		if end_date2 != math.MaxInt32 && end_date1 != end_date2 {
			endingPeriod += strconv.Itoa(end_date2)
		}
		if endingPeriod == "" {
			endingPeriod = translate.T(isoCode, "IMPORT.CSVFIELD_ALL.T_CHECK_UNDETERMINED")
		}
		// Caracs
		var characPath = characs[charac_id]
		// fmt.Println(code, characPath)
		num := strings.Count(characPath, ";")
		if num < 4 {
			scharacs = characPath + strings.Repeat(";", 4-num)
		} else {
			scharacs = characPath
		}
		scharac_lvl2 = ""
		scharac_lvl3 = ""
		scharac_lvl4 = ""
		for i, c := range strings.Split(scharacs, ";") {
			// fmt.Println(i, c)
			switch i {
			case 0:
				scharac_name = c
			case 1:
				scharac_lvl1 = c
			case 2:
				scharac_lvl2 = c
			case 3:
				scharac_lvl3 = c
			case 4:
				scharac_lvl4 = c
			}
		}
		// fmt.Println(scharac_name, scharac_lvl1, scharac_lvl2, scharac_lvl3, scharac_lvl4)
		// fmt.Println(startingPeriod, endingPeriod)
		// Caracs exp
		if exceptional {
			sexceptional = translate.T(isoCode, "IMPORT.CSVFIELD_ALL.T_LABEL_YES")
		} else {
			sexceptional = translate.T(isoCode, "IMPORT.CSVFIELD_ALL.T_LABEL_NO")
		}

		var line []string

		if includeDbName {
			line = []string{dbname, code, name, city_name, cgeonameid, "4326", slongitude, slatitude, saltitude, scentroid, knowledge_type, soccupation, startingPeriod, endingPeriod, scharac_name, scharac_lvl1, scharac_lvl2, scharac_lvl3, scharac_lvl4, sexceptional, bibliography, comment}
		} else {
			line = []string{code, name, city_name, cgeonameid, "4326", slongitude, slatitude, saltitude, scentroid, knowledge_type, soccupation, startingPeriod, endingPeriod, scharac_name, scharac_lvl1, scharac_lvl2, scharac_lvl3, scharac_lvl4, sexceptional, bibliography, comment}
		}

		err := w.Write(line)
		w.Flush()
		if err != nil {
			log.Println("database::ExportCSV : ", err.Error())
		}
	}
	rows2.Close()

	return buff.String(), nil
}
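
A hypothetical export sketch: dump a few sites to a semicolon-separated file in English. The os.WriteFile call, the id list, and the "en" isocode are additions for illustration; SitesAsCSV itself never commits or rolls back, so the caller owns the transaction.

// exportSitesToFile writes the CSV produced by SitesAsCSV to path.
func exportSitesToFile(db *sqlx.DB, path string) error {
	tx, err := db.Beginx()
	if err != nil {
		return err
	}
	csvContent, err := SitesAsCSV([]int{1, 2, 3}, "en", true, tx)
	if err != nil {
		_ = tx.Rollback()
		return err
	}
	if err := tx.Commit(); err != nil {
		return err
	}
	return os.WriteFile(path, []byte(csvContent), 0o644)
}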
Example 5
// mapDebug prints the raw date ranges of the given sites (debugging helper).
func mapDebug(sites []int, tx *sqlx.Tx) {
	type row struct {
		Id          int
		Start_date1 int
		Start_date2 int
		End_date1   int
		End_date2   int
	}
	rows := []row{}
	err := tx.Select(&rows, "SELECT site.id, sr.start_date1, sr.start_date2, sr.end_date1, sr.end_date2 FROM site LEFT JOIN site_range sr ON sr.site_id = site.id WHERE site.id IN("+model.IntJoin(sites, true)+")")
	if err != nil {
		fmt.Println("err: ", err)
	} /*else {
		for _, r := range rows {
			fmt.Println("r: ", r)
		}
	}*/
}
Example 6
// mapSearch performs the main map search: it builds a filtered site query
// from the request parameters, then returns the matching sites as GeoJSON
// or, when tocsv is true, as a CSV attachment.
func mapSearch(w http.ResponseWriter, r *http.Request, proute routes.Proute, tocsv bool) {
	// for measuring execution time
	start := time.Now()

	params := proute.Json.(*MapSearchParams)

	fmt.Println("params: ", params)

	tx, err := db.DB.Beginx()
	if err != nil {
		log.Println("can't start transaction")
		userSqlError(w, err)
		return
	}

	_user, _ := proute.Session.Get("user")
	user := _user.(model.User)

	//q := `SELECT site.id FROM site`

	filters := MapSqlQuery{}
	filters.Init()

	// custom hard coded / mandatory filters
	filters.AddTable(&MapSqlDefSite, "site", false)
	filters.AddTable(&MapSqlDefDatabase, "database", false)
	filters.AddFilter("database", `"database".published = true`)

	// add database filter
	filters.AddFilter("database", `"site".database_id IN (`+model.IntJoin(params.Database, true)+`)`)

	// Area filter
	fmt.Println(params.Area.Type, params.Area.Lat, params.Area.Lng, params.Area.Radius)
	if params.Area.Type == "disc" || params.Area.Type == "custom" {
		filters.AddFilter("site", `ST_DWithin("site".geom, Geography(ST_MakePoint($$, $$)), $$)`,
			params.Area.Lng, params.Area.Lat, params.Area.Radius)
	} else {
		filters.AddFilter("site", `ST_Within("site".geom::geometry, ST_SetSRID(ST_GeomFromGeoJSON($$),4326))`,
			params.Area.Geojson.Geometry)
	}

	// add centroid filter
	switch params.Others.Centroid {
	case "with":
		filters.AddFilter("site", `"site".centroid = true`)
	case "without":
		filters.AddFilter("site", `"site".centroid = false`)
	case "":
		// do not filter
	}

	// add knowledge filter
	str := ""
	for _, knowledge := range params.Others.Knowledges {
		switch knowledge {
		case "literature", "surveyed", "dig", "not_documented", "prospected_aerial", "prospected_pedestrian":
			if str == "" {
				str += "'" + knowledge + "'"
			} else {
				str += ",'" + knowledge + "'"
			}
		}
	}
	if str != "" {
		filters.AddTable(&MapSqlDefSiteRange, `site_range`, false)
		filters.AddTable(&MapSqlDefSiteRangeCharac, `site_range__charac`, false)
		filters.AddFilter("site_range__charac", `"site_range__charac".knowledge_type IN (`+str+`)`)
	}

	// add occupation filter (note: this scans params.Others.Knowledges and
	// keeps only the values that are valid occupation labels)
	str = ""
	for _, occupation := range params.Others.Knowledges {
		switch occupation {
		case "not_documented", "single", "continuous", "multiple":
			if str == "" {
				str += "'" + occupation + "'"
			} else {
				str += ",'" + occupation + "'"
			}
		}
	}
	if str != "" {
		filters.AddFilter("site", `"site".occupation IN (`+str+`)`)
	}

	// text filter
	if params.Others.TextSearch != "" {
		str = "1=0"
		args := []interface{}{}
		for _, textSearchIn := range params.Others.TextSearchIn {
			switch textSearchIn {
			case "site_name":
				args = append(args, "%"+params.Others.TextSearch+"%")
				str += ` OR "site".name ILIKE $$`
			case "city_name":
				args = append(args, "%"+params.Others.TextSearch+"%")
				str += ` OR "site".city_name ILIKE $$`
			case "bibliography":
				args = append(args, "%"+params.Others.TextSearch+"%")
				filters.AddTable(&MapSqlDefSiteRange, `site_range`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharac, `site_range__charac`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharacTr, `site_range__charac_tr`, false)
				str += ` OR "site_range__charac_tr".bibliography ILIKE $$`
			case "comment":
				args = append(args, "%"+params.Others.TextSearch+"%")
				filters.AddTable(&MapSqlDefSiteRange, `site_range`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharac, `site_range__charac`, false)
				filters.AddTable(&MapSqlDefSiteRangeCharacTr, `site_range__charac_tr`, false)
				str += ` OR "site_range__charac_tr".comment ILIKE $$`
			}
		}
		if str != "1=0" {
			filters.AddFilter("site", str, args...)
		}
	}

	// characs filters
	includes := make(map[int][]int)
	excludes := make(map[int][]int)
	exceptionals := make(map[int][]int)

	for characid, sel := range params.Characs {
		if sel.Include && !sel.Exceptional {
			if _, ok := includes[sel.RootId]; !ok {
				includes[sel.RootId] = make([]int, 0)
			}
			includes[sel.RootId] = append(includes[sel.RootId], characid)
		} else if sel.Include && sel.Exceptional {
			if _, ok := exceptionals[sel.RootId]; !ok {
				exceptionals[sel.RootId] = make([]int, 0)
			}
			exceptionals[sel.RootId] = append(exceptionals[sel.RootId], characid)
		} else if !sel.Include {
			if _, ok := excludes[sel.RootId]; !ok {
				excludes[sel.RootId] = make([]int, 0)
			}
			excludes[sel.RootId] = append(excludes[sel.RootId], characid)
		}
	}

	if params.Others.CharacsLinked == "all" {
		for rootid, characids := range includes {
			tableas := "site_range__charac_" + strconv.Itoa(rootid)
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, tableas, false)
			filters.AddFilter(tableas, tableas+`.charac_id IN (`+model.IntJoin(characids, true)+`)`)
		}

		for rootid, characids := range exceptionals {
			tableas := "site_range__charac_" + strconv.Itoa(rootid)
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, tableas, false)

			q := "1=0"
			for _, characid := range characids {
				q += " OR " + tableas + ".charac_id = " + strconv.Itoa(characid) + " AND " + tableas + ".exceptional = true"
			}

			filters.AddFilter(tableas, q)
		}

		for rootid, characids := range excludes {
			tableas := "x_site_range__charac_" + strconv.Itoa(rootid)
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, tableas, true)
			filters.AddFilter(tableas, tableas+".charac_id IN ("+model.IntJoin(characids, true)+")")
		}

	} else if params.Others.CharacsLinked == "at-least-one" { // default
		s_includes := []int{}
		s_excludes := []int{}
		s_exceptionals := []int{}

		for _, characids := range includes {
			s_includes = append(s_includes, characids...)
		}
		for _, characids := range excludes {
			s_excludes = append(s_excludes, characids...)
		}
		for _, characids := range exceptionals {
			s_exceptionals = append(s_exceptionals, characids...)
		}

		if len(s_includes) > 0 {
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, "site_range__charac", false)
			filters.AddFilter("site_range__charac", `site_range__charac.charac_id IN (`+model.IntJoin(s_includes, true)+`)`)
		}

		if len(s_excludes) > 0 {
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, "x_site_range__charac", true)
			filters.AddFilter("x_site_range__charac", `x_site_range__charac.charac_id IN (`+model.IntJoin(s_includes, true)+`)`)
		}

		if len(s_exceptionals) > 0 {
			filters.AddTable(&MapSqlDefSiteRange, "site_range", false)
			filters.AddTable(&MapSqlDefSiteRangeCharac, "site_range__charac", false)
			q := "1=0"
			for _, characid := range s_exceptionals {
				q += " OR site_range__charac.charac_id = " + strconv.Itoa(characid) + " AND site_range__charac.exceptional = true"
			}
			filters.AddFilter("site_range__charac", q)
		}
	}

	/*
		if len(excludes) > 0 {
			filters.AddExclude("site_range__charac", "x_site_range__charac.charac_id IN ("+model.IntJoin(excludes, true)+")")
		}
	*/
	for _, chronology := range params.Chronologies {

		q := "1=1"

		var start_date_str = strconv.Itoa(chronology.StartDate)
		var end_date_str = strconv.Itoa(chronology.EndDate)

		var tblname string
		if chronology.ExistenceInsideInclude == "+" {
			tblname = "site"
		} else if chronology.ExistenceInsideInclude == "-" {
			tblname = "x_site"
		} else {
			log.Println("ExistenceInsideInclude is bad : ", chronology.ExistenceInsideInclude)
			_ = tx.Rollback()
			return
		}

		switch chronology.ExistenceInsideSureness {
		case "potentially":
			q += " AND " + tblname + ".start_date1 <= " + end_date_str + " AND " + tblname + ".end_date2 >= " + start_date_str
			if chronology.ExistenceInsidePart == "full" {
				q += " AND " + tblname + ".start_date1 >= " + start_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str
			}
		case "certainly":
			q += " AND " + tblname + ".start_date2 <= " + end_date_str + " AND " + tblname + ".end_date1 >= " + start_date_str
			if chronology.ExistenceInsidePart == "full" {
				q += " AND " + tblname + ".start_date2 >= " + start_date_str + " AND " + tblname + ".end_date1 <= " + end_date_str
			}
		case "potentially-only":
			q += " AND " + tblname + ".start_date1 <= " + end_date_str + " AND " + tblname + ".end_date2 >= " + start_date_str
			q += " AND " + tblname + ".start_date2 > " + end_date_str + " AND " + tblname + ".end_date1 < " + start_date_str

			if chronology.ExistenceInsidePart == "full" {
				q += " AND " + tblname + ".start_date1 >= " + start_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str
			}
		}

		switch chronology.ExistenceOutsideInclude {
		case "": // it can
			// do nothing
		case "+": // it must
			switch chronology.ExistenceOutsideSureness {
			case "potentially":
				q += " AND (" + tblname + ".start_date2 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "certainly":
				q += " AND (" + tblname + ".start_date1 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "potentially-only":
				q += " AND (" + tblname + ".start_date2 < " + start_date_str + " AND " + tblname + ".start_date1 >= " + start_date_str
				q += " OR " + tblname + ".end_date1 > " + end_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str + ")"
			}

		case "-": // it must not
			switch chronology.ExistenceOutsideSureness {
			case "potentially":
				q += " AND NOT (" + tblname + ".start_date2 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "certainly":
				q += " AND NOT (" + tblname + ".start_date1 < " + start_date_str + " OR " + tblname + ".end_date1 >= " + end_date_str + ")"
			case "potentially-only":
				q += " AND NOT (" + tblname + ".start_date2 < " + start_date_str + " AND " + tblname + ".start_date1 >= " + start_date_str
				q += " OR " + tblname + ".end_date1 > " + end_date_str + " AND " + tblname + ".end_date2 <= " + end_date_str + ")"
			}
		}

		if q != "1=1" {
			if chronology.ExistenceInsideInclude == "+" {
				filters.AddFilter("site", q)
			} else if chronology.ExistenceInsideInclude == "-" {
				filters.AddTable(&MapSqlDefXSite, "x_site", true)
				filters.AddFilter("x_site", q)
			}
		}
	}

	q, q_args := filters.BuildQuery()
	fmt.Println("q: ", q, q_args)

	site_ids := []int{}
	err = tx.Select(&site_ids, q, q_args...)
	if err != nil {
		fmt.Println("query failed : ", err)
		userSqlError(w, err)
		_ = tx.Rollback()
		return
	}
	//fmt.Println("site_ids : ", site_ids)

	elapsed := time.Since(start)
	fmt.Printf("Search took %s", elapsed)

	res := ""
	if tocsv {
		fmt.Println("ICI")
		w.Header().Set("Content-Type", "text/csv")
		csvContent, err := export.SitesAsCSV(site_ids, user.First_lang_isocode, true, tx)
		if err != nil {
			log.Println("can't export query as csv")
			userSqlError(w, err)
			return
		}
		w.Header().Set("Content-Type", "text/csv")
		w.Header().Set("Content-Disposition", "attachment; filename=export.csv")
		w.Write([]byte(csvContent))
	} else {
		w.Header().Set("Content-Type", "application/json")
		res = mapGetSitesAsJson(site_ids, tx)
	}
	//mapDebug(site_ids, tx)

	err = tx.Commit()
	if err != nil {
		log.Println("can't commit")
		userSqlError(w, err)
		return
	}

	w.Write([]byte(res))
}
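
A thin wiring sketch, assuming the project's router dispatches handlers with the same (w, r, proute) signature mapSearch uses: one search implementation serves both output formats, toggled by tocsv.

// mapSearchJSON and mapSearchCSV expose the two output modes of mapSearch.
func mapSearchJSON(w http.ResponseWriter, r *http.Request, proute routes.Proute) {
	mapSearch(w, r, proute, false)
}

func mapSearchCSV(w http.ResponseWriter, r *http.Request, proute routes.Proute) {
	mapSearch(w, r, proute, true)
}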