Example #1
func (this *PrintGeoTiffTags) Run() {

	rasterType, err := raster.DetermineRasterFormat(this.inputFile)
	if rasterType != raster.RT_GeoTiff || err != nil {
		println("The input file is not of a GeoTIFF format.")
		return
	}

	input, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
		return
	}

	tagInfo := input.GetMetadataEntries()
	if len(tagInfo) > 0 {
		println(tagInfo[0])
	} else {
		println("Error reading metadata entries.")
	}
}
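Example #1 prints only tagInfo[0]. If a GeoTIFF carries more than one metadata entry, looping over the whole slice shows everything; below is a minimal, self-contained sketch of that variation (the metadataSource interface and stubRaster type are hypothetical stand-ins so the snippet compiles on its own, not part of the raster package).
package main

import "fmt"

// metadataSource is a hypothetical local interface; any raster value with a
// GetMetadataEntries method, such as the one opened above, satisfies it.
type metadataSource interface {
	GetMetadataEntries() []string
}

// printAllTags prints every metadata entry rather than only the first.
func printAllTags(r metadataSource) {
	entries := r.GetMetadataEntries()
	if len(entries) == 0 {
		fmt.Println("Error reading metadata entries.")
		return
	}
	for _, entry := range entries {
		fmt.Println(entry)
	}
}

// stubRaster stands in for a real raster so the sketch runs on its own.
type stubRaster struct{ tags []string }

func (s stubRaster) GetMetadataEntries() []string { return s.tags }

func main() {
	printAllTags(stubRaster{tags: []string{"ImageWidth: 1024", "ImageLength: 768"}})
}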
Example #2
func (this *BreachDepressions) Run() {
	//	cfg := profile.Config{
	//		CPUProfile:     true,
	//		NoShutdownHook: true, // do not hook SIGINT
	//		ProfilePath:    "/Users/johnlindsay/Documents/",
	//	}
	//	//profile.Config.ProfilePath("/Users/johnlindsay/Documents/")
	//	prf := profile.Start(&cfg)
	//	defer prf.Stop()

	//this.postBreachFilling = false

	if this.toolManager.BenchMode {
		benchmarkBreachDepressions(this)
		return
	}

	start1 := time.Now()

	var progress, oldProgress, col, row, i, n int
	var colN, rowN, r, c, flatindex int
	numSolvedCells := 0
	var dir byte
	needsFilling := false
	var z, zN, lowestNeighbour float64
	var zTest, zN2 float64
	var gc gridCell
	var p int64
	var breachDepth, maxPathBreachDepth float64
	var numCellsInPath int32
	var isPit, isEdgeCell bool
	numPits := 0
	numPitsSolved := 0
	numUnsolvedPits := 0
	numValidCells := 0
	var isActive bool
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
	backLink := [8]byte{5, 6, 7, 8, 1, 2, 3, 4}
	//outPointer := [9]float64{0, 1, 2, 4, 8, 16, 32, 64, 128}
	maxLengthOrDepthUsed := false
	if this.maxDepth > 0 || this.maxLength > 0 {
		maxLengthOrDepthUsed = true
	}
	if maxLengthOrDepthUsed && this.maxDepth == -1 {
		this.maxDepth = math.MaxFloat64
	}
	if maxLengthOrDepthUsed && this.maxLength == -1 {
		this.maxLength = math.MaxInt32
	}
	performConstrainedBreaching := this.constrainedBreaching
	if !maxLengthOrDepthUsed && performConstrainedBreaching {
		performConstrainedBreaching = false
	}
	//outputPointer := false
	//performFlowAccumulation := false
	println("Reading DEM data...")
	dem, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
		return
	}
	demConfig := dem.GetRasterConfig()
	rows := dem.Rows
	columns := dem.Columns
	rowsLessOne := rows - 1
	numCellsTotal := rows * columns
	nodata := dem.NoDataValue
	paletteName := demConfig.PreferredPalette
	minVal := dem.GetMinimumValue()
	elevDigits := len(strconv.Itoa(int(dem.GetMaximumValue() - minVal)))
	elevMultiplier := math.Pow(10, float64(8-elevDigits))
	SMALL_NUM := 1 / elevMultiplier * 10
	POS_INF := math.Inf(1)
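	// A worked example of the scaling above (illustrative numbers, not taken
	// from the source): for a DEM whose relief is about 1500 m, elevDigits is 4,
	// so elevMultiplier = 10^(8-4) = 10000 and SMALL_NUM = 10/10000 = 0.001.
	// Priorities are later packed as
	//     p = int64(z*elevMultiplier)*100000 + flatIndex%100000
	// so the scaled elevation occupies the high digits and the tie-breaking
	// flat index the low five digits; assuming NewPQueue is a min-priority
	// queue (which the breaching order implies), the lowest cell pops first
	// and, among equal elevations, the smallest flat index wins.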

	start2 := time.Now()

	output := make([][]float64, rows+2)
	pits := make([][]bool, rows+2)
	inQueue := make([][]bool, rows+2)
	flowdir := make([][]byte, rows+2)

	for i = 0; i < rows+2; i++ {
		output[i] = make([]float64, columns+2)
		pits[i] = make([]bool, columns+2)
		inQueue[i] = make([]bool, columns+2)
		flowdir[i] = make([]byte, columns+2)
	}

	//	output := structures.Create2dFloat64Array(rows+2, columns+2)
	//	pits := structures.Create2dBoolArray(rows+2, columns+2)
	//	inQueue := structures.Create2dBoolArray(rows+2, columns+2)
	//	flowdir := structures.Create2dByteArray(rows+2, columns+2)

	pq := NewPQueue()

	//q := NewQueue()
	var floodorder []int
	//floodorder := make([]int, numCellsTotal)
	floodOrderTail := 0

	// find the pit cells and initialize the grids
	printf("\rBreaching DEM (1 of 2): %v%%", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = dem.Value(row, col)
			output[row+1][col+1] = z
			flowdir[row+1][col+1] = 0
			if z != nodata {
				isPit = true
				isEdgeCell = false
				lowestNeighbour = POS_INF
				for n = 0; n < 8; n++ {
					zN = dem.Value(row+dY[n], col+dX[n])
					if zN != nodata && zN < z {
						isPit = false
						break
					} else if zN == nodata {
						isEdgeCell = true
					} else {
						if zN < lowestNeighbour {
							lowestNeighbour = zN
						}
					}
				}
				if isEdgeCell {
					gc = newGridCell(row+1, col+1, 0)
					p = int64(int64(z*elevMultiplier) * 100000)
					pq.Push(gc, p)
					inQueue[row+1][col+1] = true
				}
				if isPit {
					//					if isEdgeCell { // pit on an edge
					//						gc = newGridCell(row+1, col+1, 0)
					//						p = int64(int64(z*elevMultiplier) * 100000)
					//						//						item = &Item{
					//						//							value:    gc,
					//						//							priority: p,
					//						//						}
					//						//						heap.Push(&pq, item)
					//						pq.Push(gc, p)
					//						inQueue[row+1][col+1] = true
					//					} else { // interior pit
					if !isEdgeCell {
						pits[row+1][col+1] = true
						numPits++
					}
					/* raising a pit cell to just lower than the
					 *  elevation of its lowest neighbour will
					 *  reduce the length and depth of the trench
					 *  that is necessary to eliminate the pit
					 *  by quite a bit on average.
					 */
					if lowestNeighbour != POS_INF {
						output[row+1][col+1] = lowestNeighbour - SMALL_NUM
					}
					//}
				}
				numValidCells++
			} else {
				numSolvedCells++
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rBreaching DEM (1 of 2): %v%%", progress)
			oldProgress = progress
		}
	}

	for row = 0; row < rows+2; row++ {
		output[row][0] = nodata
		output[row][columns+1] = nodata
		flowdir[row][0] = 0
		flowdir[row][columns+1] = 0
	}

	for col = 0; col < columns+2; col++ {
		output[0][col] = nodata
		output[rows+1][col] = nodata
		flowdir[0][col] = 0
		flowdir[rows+1][col] = 0
	}

	//heap.Init(&pq)

	// now breach
	printf("\r                                                                 ")
	oldProgress = int(100.0 * numSolvedCells / numCellsTotal)
	printf("\rBreaching DEM (2 of 2): %v%%", oldProgress)

	if !maxLengthOrDepthUsed {
		// Perform a complete breaching solution; there will be no subsequent filling
		for numPitsSolved < numPits {
			gc = pq.Pop()
			row = gc.row
			col = gc.column
			flatindex = gc.flatIndex
			for i = 0; i < 8; i++ {
				rowN = row + dY[i]
				colN = col + dX[i]
				zN = output[rowN][colN]
				if zN != nodata && !inQueue[rowN][colN] {
					flowdir[rowN][colN] = backLink[i]
					if pits[rowN][colN] {
						numPitsSolved++
						// trace the flowpath back until you find a lower cell
						zTest = zN
						r = rowN
						c = colN
						isActive = true
						for isActive {
							zTest -= SMALL_NUM // ensures a small increment slope
							dir = flowdir[r][c]
							if dir > 0 {
								r += dY[dir-1]
								c += dX[dir-1]
								zN2 = output[r][c]
								if zN2 <= zTest || zN2 == nodata {
									// a lower grid cell or edge has been found
									isActive = false
								} else {
									output[r][c] = zTest
								}
							} else {
								// a pit has been located, likely at the edge
								isActive = false
							}
						}
					}
					numSolvedCells++
					n = 0
					if pits[rowN][colN] {
						n = flatindex + 1
					}
					gc = newGridCell(rowN, colN, n)
					p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
					pq.Push(gc, p)
					inQueue[rowN][colN] = true
				}
			}
			progress = int(100.0 * numSolvedCells / numCellsTotal)
			if progress != oldProgress {
				printf("\rBreaching DEM (2 of 2): %v%%", progress)
				oldProgress = progress
			}
		}
	} else if !performConstrainedBreaching {
		// Perform selective breaching. Sinks that can be removed within the
		// specified constraints of the max breach length and depth will
		// be breached. Otherwise they will be removed during a subsequent
		// filling operation.
		floodorder = make([]int, numValidCells)
		for pq.Len() > 0 { //numPitsSolved < numPits {
			gc = pq.Pop()
			row = gc.row
			col = gc.column
			if this.postBreachFilling {
				//q.Push(row, col)
				floodorder[floodOrderTail] = row*columns + col
				floodOrderTail++
			}
			flatindex = gc.flatIndex
			for i = 0; i < 8; i++ {
				rowN = row + dY[i]
				colN = col + dX[i]
				zN = output[rowN][colN]
				if zN != nodata && !inQueue[rowN][colN] {
					flowdir[rowN][colN] = backLink[i]
					if pits[rowN][colN] {
						numPitsSolved++
						// trace the flowpath back until you find a lower cell
						// or a constraint is encountered
						numCellsInPath = 0
						maxPathBreachDepth = 0

						zTest = zN
						r = rowN
						c = colN
						isActive = true
						for isActive {
							zTest -= SMALL_NUM // ensures a small increment slope
							dir = flowdir[r][c]
							if dir > 0 {
								r += dY[dir-1]
								c += dX[dir-1]
								zN2 = output[r][c]
								if zN2 <= zTest || zN2 == nodata {
									// a lower grid cell has been found
									isActive = false
								} else {
									breachDepth = dem.Value(r-1, c-1) - zTest
									if breachDepth > maxPathBreachDepth {
										maxPathBreachDepth = breachDepth
									}
								}
							} else {
								isActive = false
							}
							numCellsInPath++
							if numCellsInPath > this.maxLength {
								isActive = false
							}
							if maxPathBreachDepth > this.maxDepth {
								isActive = false
							}
						}

						if numCellsInPath <= this.maxLength && maxPathBreachDepth <= this.maxDepth {
							// breach it completely
							zTest = zN
							r = rowN
							c = colN
							isActive = true
							for isActive {
								zTest -= SMALL_NUM // ensures a small increment slope
								dir = flowdir[r][c]
								if dir > 0 {
									r += dY[dir-1]
									c += dX[dir-1]
									zN2 = output[r][c]
									if zN2 <= zTest || zN2 == nodata {
										// a lower grid cell has been found
										isActive = false
									} else {
										output[r][c] = zTest
									}
								} else {
									isActive = false
								}
							}
						} else {
							// it will be removed by filling in the next step.
							needsFilling = true
							numUnsolvedPits++
						}
					}
					numSolvedCells++
					n = 0
					if pits[rowN][colN] {
						n = flatindex + 1
					}
					gc = newGridCell(rowN, colN, n)
					p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
					pq.Push(gc, p)
					inQueue[rowN][colN] = true
				}
			}
			progress = int(100.0 * numSolvedCells / numCellsTotal)
			if progress != oldProgress {
				printf("\rBreaching DEM (2 of 2): %v%%", progress)
				oldProgress = progress
			}
		}
	} else {
		// perform constrained breaching
		floodorder = make([]int, numValidCells)
		var outletHeight float64
		var outletDist, targetDist, j int32
		var zOrig float64
		for pq.Len() > 0 { //numPitsSolved < numPits {
			//item := heap.Pop(&pq).(*Item)
			//gc = item.value
			gc = pq.Pop()
			row = gc.row
			col = gc.column
			if this.postBreachFilling {
				//q.Push(row, col)
				floodorder[floodOrderTail] = row*columns + col
				floodOrderTail++
			}
			flatindex = gc.flatIndex
			//z = output[row][col]
			for i = 0; i < 8; i++ {
				rowN = row + dY[i]
				colN = col + dX[i]
				zN = output[rowN][colN]
				if zN != nodata && !inQueue[rowN][colN] {
					flowdir[rowN][colN] = backLink[i]
					if pits[rowN][colN] {
						numPitsSolved++
						// trace the flowpath back until you find a lower cell
						// or a constraint is encountered
						numCellsInPath = 0
						maxPathBreachDepth = 0

						zTest = zN
						r = rowN
						c = colN
						outletHeight = -math.MaxFloat64
						outletDist = 0
						isActive = true
						for isActive {
							zTest -= SMALL_NUM // ensures a small increment slope
							dir = flowdir[r][c]
							if dir > 0 {
								r += dY[dir-1]
								c += dX[dir-1]
								zN2 = output[r][c]
								if zN2 <= zTest || zN2 == nodata {
									// a lower grid cell has been found
									isActive = false
								} else {
									zOrig = dem.Value(r-1, c-1)
									breachDepth = zOrig - zTest
									if breachDepth > maxPathBreachDepth {
										maxPathBreachDepth = breachDepth
									}
									if zOrig > outletHeight {
										outletHeight = zOrig
										outletDist = numCellsInPath
									}
								}
							} else {
								isActive = false
							}
							numCellsInPath++
						}

						if numCellsInPath <= this.maxLength && maxPathBreachDepth <= this.maxDepth {
							// breach it completely
							zTest = zN
							r = rowN
							c = colN
							isActive = true
							for isActive {
								zTest -= SMALL_NUM // ensures a small increment slope
								dir = flowdir[r][c]
								if dir > 0 {
									r += dY[dir-1]
									c += dX[dir-1]
									zN2 = output[r][c]
									if zN2 <= zTest || zN2 == nodata {
										// a lower grid cell has been found
										isActive = false
									} else {
										output[r][c] = zTest
									}
								} else {
									isActive = false
								}
							}
						} else {
							// ***Constrained Breaching***
							// it will be completely removed by filling in the next step...
							needsFilling = true
							// but in the meantime, lower the outlet as much as you can.

							zTest = outletHeight - this.maxDepth
							targetDist = numCellsInPath

							if numCellsInPath > this.maxLength {
								if outletDist < this.maxLength/2 {
									targetDist = this.maxLength
								} else {
									targetDist = outletDist + this.maxLength/2
								}
								r = rowN
								c = colN
								for j = 0; j < targetDist; j++ {
									dir = flowdir[r][c]
									if dir > 0 {
										r += dY[dir-1]
										c += dX[dir-1]
										zTest = output[r][c]
									} else {
										break
									}
								}
								if outletHeight-zTest > this.maxDepth {
									zTest = outletHeight - this.maxDepth
								}
							}

							r = rowN
							c = colN
							isActive = true
							numCellsInPath = 0
							for isActive {
								dir = flowdir[r][c]
								if dir > 0 {
									r += dY[dir-1]
									c += dX[dir-1]
									zN2 = output[r][c]
									if zN2 <= zN || zN2 == nodata {
										// a lower grid cell has been found
										isActive = false
									} else {
										if output[r][c] > zTest {
											output[r][c] = zTest
										}
									}
								} else {
									isActive = false
								}
								numCellsInPath++
								if numCellsInPath > targetDist {
									isActive = false
								}
							}
						}
					}
					numSolvedCells++
					n = 0
					if pits[rowN][colN] {
						n = flatindex + 1
					}
					gc = newGridCell(rowN, colN, n)
					p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
					pq.Push(gc, p)
					inQueue[rowN][colN] = true
				}
			}
			progress = int(100.0 * numSolvedCells / numCellsTotal)
			if progress != oldProgress {
				printf("\rBreaching DEM (2 of 2): %v%%", progress)
				oldProgress = progress
			}
		}
	}

	pits = nil
	inQueue = nil

	if needsFilling && this.postBreachFilling {
		// Fill the DEM.
		printf("\r                                                                ")

		numSolvedCells = 0
		//for q.Len() > 0 {
		//row, col = q.Pop()
		for c := 0; c < numValidCells; c++ {
			row = floodorder[c] / columns
			col = floodorder[c] % columns
			if row >= 0 && col >= 0 {
				z = output[row][col]
				dir = flowdir[row][col]
				if dir > 0 {
					rowN = row + dY[dir-1]
					colN = col + dX[dir-1]
					zN = output[rowN][colN]
					if zN != nodata {
						if z <= zN+SMALL_NUM {
							output[row][col] = zN + SMALL_NUM
						}
					}
				}
			}
			numSolvedCells++
			progress = int(100.0 * numSolvedCells / numValidCells)
			if progress != oldProgress {
				printf("\rFilling DEM: %v%%", progress)
				oldProgress = progress
			}
		}
	}

	// output the data
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = paletteName
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	displayMin := demConfig.DisplayMinimum
	displayMax := demConfig.DisplayMaximum
	config.CoordinateRefSystemWKT = demConfig.CoordinateRefSystemWKT
	config.EPSGCode = demConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		dem.North, dem.South, dem.East, dem.West, config)
	if err != nil {
		panic("Failed to write raster")
	}

	printf("\nSaving DEM data...\n")
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = output[row+1][col+1]
			rout.SetValue(row, col, z)
		}
	}

	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by BreachDepressions tool"))
	rout.AddMetadataEntry(fmt.Sprintf("Max breach depth: %v", this.maxDepth))
	rout.AddMetadataEntry(fmt.Sprintf("Max breach length: %v", this.maxLength))
	rout.AddMetadataEntry(fmt.Sprintf("Constrained Breaching: %v", this.constrainedBreaching))
	config.DisplayMinimum = displayMin
	config.DisplayMaximum = displayMax
	rout.SetRasterConfig(config)
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)

	if numUnsolvedPits > 0 {
		printf("Num. of unbreached pits/flats: %v (%f%% of total)\n", numUnsolvedPits, (100.0 * float64(numUnsolvedPits) / float64(numSolvedCells)))
	} else {
		println("All pits/flats were resolved by breaching")
	}
}
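The breach-tracing loops in Example #2 repeatedly follow flowdir back toward already-solved cells using the dX/dY offset tables; backLink[i] is the direction code that points from neighbour i back to the cell it was discovered from, and a code of 0 marks a cell with no recorded direction. The following self-contained sketch walks a flow path on a tiny hard-coded direction grid (values invented for illustration) with exactly the same dir > 0 / dir-1 indexing as the tool.
package main

import "fmt"

func main() {
	// Same neighbour offset tables as the tool.
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}

	// A hypothetical 4x4 direction grid; 0 means "no downstream neighbour".
	flowdir := [][]byte{
		{0, 0, 0, 0},
		{0, 3, 4, 0},
		{0, 2, 5, 0},
		{0, 0, 0, 0},
	}

	// Trace the flow path from cell (1, 1), just as the breaching loops do.
	r, c := 1, 1
	for {
		fmt.Printf("visiting cell (%d, %d)\n", r, c)
		dir := flowdir[r][c]
		if dir == 0 {
			break
		}
		r += dY[dir-1]
		c += dX[dir-1]
	}
	// Visits (1, 1) -> (2, 2) -> (3, 1), where the trace stops.
}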
Example #3
func (this *Whitebox2GeoTiff) Run() {

	// check that the input raster is in Whitebox GAT format
	rasterType, err := raster.DetermineRasterFormat(this.inputFile)
	if rasterType != raster.RT_WhiteboxRaster || err != nil {
		println("The input file is not of a Whitebox GAT format.")
		return
	}

	input, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
		return
	}

	// get the input config
	inConfig := input.GetRasterConfig()

	// get the number of rows and columns
	rows := input.Rows
	columns := input.Columns
	rowsLessOne := rows - 1
	inNodata := input.NoDataValue

	// check that the specified output file is in GeoTiff format
	rasterType, err = raster.DetermineRasterFormat(this.outputFile)
	if rasterType != raster.RT_GeoTiff || err != nil {
		println("Warning: The specified output file name is not of a GeoTIFF format.\nThe file name has been modified")
		index := strings.LastIndex(this.outputFile, ".")
		extension := this.outputFile[index:len(this.outputFile)]
		newFileName := strings.Replace(this.outputFile, extension, ".tif", -1)
		this.outputFile = newFileName
	}

	// output the data
	outConfig := raster.NewDefaultRasterConfig()
	outConfig.DataType = inConfig.DataType
	outConfig.EPSGCode = inConfig.EPSGCode
	//outConfig.NoDataValue = inConfig.NoDataValue
	outConfig.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	output, err := raster.CreateNewRaster(this.outputFile, input.Rows, input.Columns,
		input.North, input.South, input.East, input.West, outConfig)
	if err != nil {
		println(err.Error())
		return
	}
	outNodata := output.NoDataValue

	var progress, oldProgress int
	var z float64
	oldProgress = -1
	for row := 0; row < rows; row++ {
		for col := 0; col < columns; col++ {
			z = input.Value(row, col)
			if z != inNodata {
				output.SetValue(row, col, z)
			} else {
				output.SetValue(row, col, outNodata)
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rProgress: %v%%", progress)
			oldProgress = progress
		}
	}
	output.Save()
	println("\nOperation complete!")
}
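One subtlety in the extension handling of Example #3: strings.Replace with a count of -1 swaps every occurrence of the current extension in the path, so a name such as /data/my.dep.files.dep would become /data/my.tif.files.tif. A small standard-library alternative (a sketch, not the tool's own code) that only touches the trailing extension:
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// forceTifExtension returns the file name with its trailing extension
// replaced by .tif, leaving any earlier dots in the path untouched.
func forceTifExtension(name string) string {
	ext := filepath.Ext(name)
	return strings.TrimSuffix(name, ext) + ".tif"
}

func main() {
	fmt.Println(forceTifExtension("/data/dem.dep"))          // /data/dem.tif
	fmt.Println(forceTifExtension("/data/my.dep.files.dep")) // /data/my.dep.files.tif
}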
/* This function is only used to benchmark the FillDepressions tool.
   It can be called by running the tool in 'benchon' mode. The tool is run
   10 times and elapsed times do not include disk I/O. No output file
   is created.
*/
func benchmarkFillDepressions(parent *FillDepressions) {
	println("Benchmarking FillDepressions...")

	var progress, oldProgress, col, row, i, n int
	var colN, rowN, flatindex int
	numSolvedCells := 0
	var z, zN float64
	var gc gridCell
	var p int64
	var isEdgeCell bool
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}

	println("Reading DEM data...")
	dem, err := raster.CreateRasterFromFile(parent.inputFile)
	if err != nil {
		println(err.Error())
		return
	}
	rows := dem.Rows
	columns := dem.Columns
	rowsLessOne := rows - 1
	numCellsTotal := rows * columns
	nodata := dem.NoDataValue
	demConfig := dem.GetRasterConfig()
	paletteName := demConfig.PreferredPalette

	// output the data
	// make a copy of the dem's raster configuration
	//config := dem.GetRasterConfig()
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = paletteName
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	displayMin := demConfig.DisplayMinimum
	displayMax := demConfig.DisplayMaximum
	config.DisplayMinimum = displayMin
	config.DisplayMaximum = displayMax
	config.CoordinateRefSystemWKT = demConfig.CoordinateRefSystemWKT
	config.EPSGCode = demConfig.EPSGCode
	value := fmt.Sprintf("Created on %s\n", time.Now().Local())
	config.MetadataEntries = append(config.MetadataEntries, value)
	rout, err := raster.CreateNewRaster(parent.outputFile, rows, columns,
		dem.North, dem.South, dem.East, dem.West, config)
	if err != nil {
		panic("Failed to write raster")
	}

	minVal := dem.GetMinimumValue()
	elevDigits := len(strconv.Itoa(int(dem.GetMaximumValue() - minVal)))
	elevMultiplier := math.Pow(10, float64(8-elevDigits))
	SMALL_NUM := 1 / elevMultiplier
	if !parent.fixFlats {
		SMALL_NUM = 0
	}

	println("The tool will now be run 10 times...")
	var benchTimes [10]time.Duration
	for bt := 0; bt < 10; bt++ {

		println("Run", (bt + 1), "...")

		startTime := time.Now()

		// Fill the DEM.
		inQueue := make([][]bool, rows+2)

		for i = 0; i < rows+2; i++ {
			inQueue[i] = make([]bool, columns+2)
		}

		// Reinitialize the priority queue and flow direction grid.
		numSolvedCells = 0

		//pq := make(PriorityQueue, 0)
		pq := NewPQueue()

		// find the pit cells and initialize the grids
		printf("\r                                                      ")
		printf("\rFilling DEM (1 of 2): %v%%", 0)
		oldProgress = 0
		for row = 0; row < rows; row++ {
			for col = 0; col < columns; col++ {
				z = dem.Value(row, col)
				if z != nodata {
					//isPit = true
					isEdgeCell = false
					for n = 0; n < 8; n++ {
						zN = dem.Value(row+dY[n], col+dX[n])
						if zN == nodata {
							isEdgeCell = true
						}
					}

					if isEdgeCell {
						gc = newGridCell(row, col, 0)
						p = int64(int64(z*elevMultiplier) * 100000)
						pq.Push(gc, p)
						inQueue[row+1][col+1] = true
						rout.SetValue(row, col, z)
						numSolvedCells++
					}
				} else {
					numSolvedCells++
				}
			}
			progress = int(100.0 * row / rowsLessOne)
			if progress != oldProgress {
				printf("\rFilling DEM (1 of 2): %v%%", progress)
				oldProgress = progress
			}
		}

		printf("\r                                                      ")
		oldProgress = -1
		for pq.Len() > 0 {
			gc = pq.Pop()
			row = gc.row
			col = gc.column
			flatindex = gc.flatIndex
			z = rout.Value(row, col)
			for i = 0; i < 8; i++ {
				rowN = row + dY[i]
				colN = col + dX[i]
				zN = dem.Value(rowN, colN)
				if zN != nodata && !inQueue[rowN+1][colN+1] {
					n = 0
					if zN <= z {
						zN = z + SMALL_NUM
						n = flatindex + 1
					}
					numSolvedCells++
					rout.SetValue(rowN, colN, zN)
					gc = newGridCell(rowN, colN, n)
					p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
					pq.Push(gc, p)
					inQueue[rowN+1][colN+1] = true
				}
			}
			progress = int(100.0 * numSolvedCells / numCellsTotal)
			if progress != oldProgress {
				printf("\rFilling DEM (2 of 2): %v%%", progress)
				oldProgress = progress
			}
		}

		benchTimes[bt] = time.Since(startTime)
		printf("     Elapsed time (s): %v\n", benchTimes[bt].Seconds())
	}
	println("")
	println("Elapsed times (in sec.) of the 10 runs:")
	avgVal := 0.0
	for i := 0; i < 10; i++ {
		println(benchTimes[i].Seconds())
		avgVal += benchTimes[i].Seconds()
	}

	println("Average Time: ", avgVal/10.0)

	println("Operation complete!")

}
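benchmarkFillDepressions runs a priority-flood fill: edge cells seed a minimum-priority queue, and each popped cell raises any unvisited neighbour to at least its own elevation before queuing it. Below is a compact, self-contained sketch of that idea on a tiny in-memory grid, assuming a plain min-heap from container/heap in place of the tool's NewPQueue and ignoring nodata values and the flat-index tie-breaking.
package main

import (
	"container/heap"
	"fmt"
)

type cell struct {
	row, col int
	z        float64
}

// minHeap orders cells by elevation, lowest first.
type minHeap []cell

func (h minHeap) Len() int            { return len(h) }
func (h minHeap) Less(i, j int) bool  { return h[i].z < h[j].z }
func (h minHeap) Swap(i, j int)       { h[i], h[j] = h[j], h[i] }
func (h *minHeap) Push(x interface{}) { *h = append(*h, x.(cell)) }
func (h *minHeap) Pop() interface{} {
	old := *h
	n := len(old)
	c := old[n-1]
	*h = old[:n-1]
	return c
}

func main() {
	// A tiny DEM with an interior depression at (1,1) and (1,2).
	dem := [][]float64{
		{5, 5, 5, 5},
		{5, 1, 2, 5},
		{5, 5, 5, 5},
	}
	rows, cols := len(dem), len(dem[0])
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}

	out := make([][]float64, rows)
	inQueue := make([][]bool, rows)
	for r := range dem {
		out[r] = append([]float64(nil), dem[r]...)
		inQueue[r] = make([]bool, cols)
	}

	// Seed the queue with every edge cell, as the tool does.
	h := &minHeap{}
	for r := 0; r < rows; r++ {
		for c := 0; c < cols; c++ {
			if r == 0 || c == 0 || r == rows-1 || c == cols-1 {
				heap.Push(h, cell{r, c, out[r][c]})
				inQueue[r][c] = true
			}
		}
	}

	// Pop the lowest cell and raise unvisited neighbours to at least its level.
	for h.Len() > 0 {
		cur := heap.Pop(h).(cell)
		for i := 0; i < 8; i++ {
			r, c := cur.row+dY[i], cur.col+dX[i]
			if r < 0 || c < 0 || r >= rows || c >= cols || inQueue[r][c] {
				continue
			}
			if out[r][c] < cur.z {
				out[r][c] = cur.z // the SMALL_NUM increment is omitted for brevity
			}
			heap.Push(h, cell{r, c, out[r][c]})
			inQueue[r][c] = true
		}
	}

	fmt.Println(out) // the depression at (1,1)-(1,2) has been filled to 5
}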
Example #5
func (this *BreachStreams) Run() {
	start1 := time.Now()

	var progress, oldProgress, col, row, i, n int
	var colN, rowN, r, c, flatindex int
	numSolvedCells := 0
	var dir byte
	var z, zN, lowestNeighbour, s, sN float64
	var zTest, zN2, zN3 float64
	var gc gridCell
	var p int64
	var isPit, isEdgeCell, isStream bool
	numPits := 0
	numPitsSolved := 0
	numUnsolvedPits := 0
	numValidCells := 0
	var isActive bool
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
	backLink := [8]byte{5, 6, 7, 8, 1, 2, 3, 4}

	println("Reading input data...")
	dem, err := raster.CreateRasterFromFile(this.demFile)
	if err != nil {
		println(err.Error())
		return
	}
	demConfig := dem.GetRasterConfig()
	rows := dem.Rows
	columns := dem.Columns
	rowsLessOne := rows - 1
	numCellsTotal := rows * columns
	nodata := dem.NoDataValue
	paletteName := demConfig.PreferredPalette
	minVal := dem.GetMinimumValue()
	elevDigits := len(strconv.Itoa(int(dem.GetMaximumValue() - minVal)))
	elevMultiplier := math.Pow(10, float64(8-elevDigits))
	SMALL_NUM := 1 / elevMultiplier * 10
	POS_INF := math.Inf(1)

	streams, err := raster.CreateRasterFromFile(this.streamFile)
	if err != nil {
		println(err.Error())
		return
	}
	if streams.Rows != rows || streams.Columns != columns {
		println("The input rasters must be of the same dimensions.")
		return
	}
	streamsNodata := streams.NoDataValue

	start2 := time.Now()

	output := make([][]float64, rows+2)
	pits := make([][]bool, rows+2)
	inQueue := make([][]bool, rows+2)
	flowdir := make([][]byte, rows+2)

	for i = 0; i < rows+2; i++ {
		output[i] = make([]float64, columns+2)
		pits[i] = make([]bool, columns+2)
		inQueue[i] = make([]bool, columns+2)
		flowdir[i] = make([]byte, columns+2)
	}

	pq := NewPQueue()

	//	oldProgress = 0
	//	for row = 0; row < rows; row++ {
	//		for col = 0; col < columns; col++ {
	//			z = dem.Value(row, col)
	//			output[row+1][col+1] = z
	//			flowdir[row+1][col+1] = 0
	//			if z != nodata {
	//				s = streams.Value(row, col)
	//				if s != streamsNodata && s > 0 {
	//					lowestNeighbour = POS_INF
	//					for n = 0; n < 8; n++ {
	//						sN = streams.Value(row+dY[n], col+dX[n])
	//						if sN != streamsNodata && sN > 0 {
	//							zN = dem.Value(row+dY[n], col+dX[n])
	//							if zN < lowestNeighbour {
	//								lowestNeighbour = zN
	//							}
	//						}
	//					}
	//					if lowestNeighbour < z {
	//						output[row+1][col+1] = lowestNeighbour - SMALL_NUM
	//					}
	//				}
	//			}
	//		}
	//		progress = int(100.0 * row / rowsLessOne)
	//		if progress != oldProgress {
	//			printf("\rBreaching DEM (1 of 3): %v%%", progress)
	//			oldProgress = progress
	//		}
	//	}

	// find the pit cells and initialize the grids
	printf("\rBreaching DEM (1 of 2): %v%%", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = dem.Value(row, col)
			output[row+1][col+1] = z
			flowdir[row+1][col+1] = 0
			//z = output[row+1][col+1]
			if z != nodata {
				isPit = true
				isEdgeCell = false
				lowestNeighbour = POS_INF
				s = streams.Value(row, col)
				if s != streamsNodata && s > 0 {
					isStream = true
				} else {
					isStream = false
				}

				for n = 0; n < 8; n++ {
					zN = dem.Value(row+dY[n], col+dX[n])
					//zN = output[row+dY[n]+1][col+dX[n]+1]
					sN = streams.Value(row+dY[n], col+dX[n])
					if zN != nodata && zN < z { // there's a lower cell
						if !isStream {
							isPit = false
							//break
						} else {
							if sN != streamsNodata && sN > 0 { // there's a lower stream cell; it's not a stream pit
								isPit = false
								//break
							}
						}

					} else if zN == nodata {
						isEdgeCell = true
					} else {
						if zN < lowestNeighbour {
							lowestNeighbour = zN
						}
					}
				}

				if isEdgeCell {
					gc = newGridCell(row+1, col+1, 0)
					if isStream {
						p = int64(int64(z*elevMultiplier) * 10000)
						// given their much higher priorities, stream cells will always
						// be visited before non-stream cells when they are present
						// in the queue.
					} else {
						p = int64(10000000000000 + int64(z*elevMultiplier)*10000)
					}
					pq.Push(gc, p)
					inQueue[row+1][col+1] = true
				}
				if isPit {
					if !isEdgeCell {
						pits[row+1][col+1] = true
						numPits++
					}
					/* raising a pit cell to just lower than the
					 *  elevation of its lowest neighbour will
					 *  reduce the length and depth of the trench
					 *  that is necessary to eliminate the pit
					 *  by quite a bit on average.
					 */
					if lowestNeighbour != POS_INF && !isStream { // this shouldn't be done for stream cells
						output[row+1][col+1] = lowestNeighbour - SMALL_NUM
					}
					//}
				}
				numValidCells++
			} else {
				numSolvedCells++
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rBreaching DEM (1 of 2): %v%%", progress)
			oldProgress = progress
		}
	}

	for row = 0; row < rows+2; row++ {
		output[row][0] = nodata
		output[row][columns+1] = nodata
		flowdir[row][0] = 0
		flowdir[row][columns+1] = 0
	}

	for col = 0; col < columns+2; col++ {
		output[0][col] = nodata
		output[rows+1][col] = nodata
		flowdir[0][col] = 0
		flowdir[rows+1][col] = 0
	}

	// now breach
	printf("\r                                                                 ")
	oldProgress = int(100.0 * numSolvedCells / numCellsTotal)
	printf("\rBreaching DEM (2 of 2): %v%%", oldProgress)

	// Perform a complete breaching solution; there will be no subsequent filling
	for numPitsSolved < numPits {
		gc = pq.Pop()
		row = gc.row
		col = gc.column
		flatindex = gc.flatIndex

		//		s = streams.Value(row, col)
		//		if s != streamsNodata && s > 0 {
		//			output[row+1][col+1] -= 10.0
		//		}

		for i = 0; i < 8; i++ {
			rowN = row + dY[i]
			colN = col + dX[i]
			zN = output[rowN][colN]
			if zN != nodata && !inQueue[rowN][colN] {
				flowdir[rowN][colN] = backLink[i]
				if pits[rowN][colN] {
					numPitsSolved++
					// trace the flowpath back until you find a lower cell
					zTest = zN
					r = rowN
					c = colN
					isActive = true
					for isActive {
						zTest -= SMALL_NUM // ensures a small increment slope
						s = streams.Value(r, c)
						if s > 0 && s != streamsNodata {
							// is there a neighbouring non-stream cell that is lower than zTest?
							lowestNeighbour = POS_INF // this will actually be the lowest non-stream neighbour
							for n = 0; n < 8; n++ {
								sN = streams.Value(r+dY[n], c+dX[n])
								zN3 = output[r+dY[n]][c+dX[n]]
								if (sN == 0 || sN == streamsNodata) && zN3 != nodata { // it's a non-stream but valid neighbour
									if zN3 < lowestNeighbour {
										lowestNeighbour = zN3
									}
								}
							}
							if lowestNeighbour < zTest {
								zTest = lowestNeighbour - SMALL_NUM
							}
						}
						dir = flowdir[r][c]
						if dir > 0 {
							r += dY[dir-1]
							c += dX[dir-1]
							zN2 = output[r][c]
							if zN2 <= zTest || zN2 == nodata {
								// a lower grid cell or edge has been found
								isActive = false
							} else {
								output[r][c] = zTest
							}
						} else {
							// a pit has been located, likely at the edge
							isActive = false
						}
					}
				}
				numSolvedCells++
				n = 0
				if pits[rowN][colN] {
					n = flatindex + 1
				}
				gc = newGridCell(rowN, colN, n)
				s = streams.Value(rowN-1, colN-1)
				if s != streamsNodata && s > 0 {
					isStream = true
				} else {
					isStream = false
				}
				if isStream {
					p = int64(int64(zN*elevMultiplier)*10000 + (int64(n) % 10000))
				} else {
					p = int64(10000000000000 + int64(zN*elevMultiplier)*10000 + (int64(n) % 10000))
				}
				pq.Push(gc, p)
				inQueue[rowN][colN] = true
			}
		}
		progress = int(100.0 * numSolvedCells / numCellsTotal)
		if progress != oldProgress {
			printf("\rBreaching DEM (2 of 2): %v%%", progress)
			oldProgress = progress
		}
	}

	// output the data
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = paletteName
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	displayMin := demConfig.DisplayMinimum
	displayMax := demConfig.DisplayMaximum
	config.CoordinateRefSystemWKT = demConfig.CoordinateRefSystemWKT
	config.EPSGCode = demConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		dem.North, dem.South, dem.East, dem.West, config)
	if err != nil {
		panic("Failed to write raster")
	}

	printf("\nSaving DEM data...\n")
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			//			s = streams.Value(row, col)
			//			if s != streamsNodata && s > 0 && output[row+1][col+1] != nodata {
			//				z = output[row+1][col+1] - SMALL_NUM*2
			//			} else {
			//				z = output[row+1][col+1]
			//			}
			z = output[row+1][col+1]
			rout.SetValue(row, col, z)
		}
	}

	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by BreachStreams tool"))
	config.DisplayMinimum = displayMin
	config.DisplayMaximum = displayMax
	rout.SetRasterConfig(config)
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)

	if numUnsolvedPits > 0 {
		printf("Num. of unbreached pits/flats: %v (%f%% of total)\n", numUnsolvedPits, (100.0 * float64(numUnsolvedPits) / float64(numSolvedCells)))
	} else {
		println("All pits/flats were resolved by breaching")
	}
}
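The priority packing in BreachStreams gives stream cells precedence by adding a 10^13 offset to every non-stream cell. Assuming NewPQueue pops the smallest priority value first (which the in-code comment about stream cells being visited first implies), a quick arithmetic check with invented elevations shows the offset dominates the scaled-elevation term:
package main

import "fmt"

func main() {
	// Illustrative numbers only: elevMultiplier = 10000 corresponds to a DEM
	// with four elevation digits (see the scaling notes in BreachDepressions).
	const elevMultiplier = 10000.0
	const nonStreamOffset = int64(10000000000000) // the 10^13 offset used above

	streamZ, nonStreamZ := 950.0, 12.0 // a high stream cell vs. a low non-stream cell

	pStream := int64(streamZ*elevMultiplier) * 10000
	pNonStream := nonStreamOffset + int64(nonStreamZ*elevMultiplier)*10000

	// The scaled-elevation term stays far below 10^13, so the offset alone
	// decides the ordering: the stream cell pops first despite being higher.
	fmt.Println(pStream, pNonStream, pStream < pNonStream) // ... true
}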
Example #6
func (this *DeviationFromMean) Run() {
	start1 := time.Now()

	var progress, oldProgress, col, row int
	var z, sum, sumSqr float64
	var sumN, N int
	var x1, x2, y1, y2 int
	var outValue, v, s, m float64

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
		return
	}
	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()
	minValue := rin.GetMinimumValue()
	maxValue := rin.GetMaximumValue()
	valueRange := maxValue - minValue
	k := minValue + valueRange/2.0

	start2 := time.Now()

	I := make([][]float64, rows)
	I2 := make([][]float64, rows)
	IN := make([][]int, rows)

	for row = 0; row < rows; row++ {
		I[row] = make([]float64, columns)
		I2[row] = make([]float64, columns)
		IN[row] = make([]int, columns)
	}

	// calculate the integral image
	printf("\rCalculating integral image (1 of 2): %v%%\n", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		sum = 0
		sumSqr = 0
		sumN = 0
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z == nodata {
				z = 0
			} else {
				z = z - k
				sumN++
			}
			sum += z
			sumSqr += z * z
			if row > 0 {
				I[row][col] = sum + I[row-1][col]
				I2[row][col] = sumSqr + I2[row-1][col]
				IN[row][col] = sumN + IN[row-1][col]
			} else {
				I[row][col] = sum
				I2[row][col] = sumSqr
				IN[row][col] = sumN
			}

		}
		progress = int(100.0 * row / rowsLessOne)
		if progress%5 == 0 && progress != oldProgress {
			printf("\rCalculating integral image (1 of 2): %v%%\n", progress)
			oldProgress = progress
		}
	}

	// output the data
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = "blue_white_red.plt"
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	printf("\rPerforming analysis (2 of 2): %v%%\n", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		y1 = row - this.neighbourhoodSize
		if y1 < 0 {
			y1 = 0
		}
		if y1 >= rows {
			y1 = rows - 1
		}

		y2 = row + this.neighbourhoodSize
		if y2 < 0 {
			y2 = 0
		}
		if y2 >= rows {
			y2 = rows - 1
		}
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z != nodata {
				x1 = col - this.neighbourhoodSize
				if x1 < 0 {
					x1 = 0
				}
				if x1 >= columns {
					x1 = columns - 1
				}

				x2 = col + this.neighbourhoodSize
				if x2 < 0 {
					x2 = 0
				}
				if x2 >= columns {
					x2 = columns - 1
				}

				N = IN[y2][x2] + IN[y1][x1] - IN[y1][x2] - IN[y2][x1]
				if N > 0 {
					sum = I[y2][x2] + I[y1][x1] - I[y1][x2] - I[y2][x1]
					sumSqr = I2[y2][x2] + I2[y1][x1] - I2[y1][x2] - I2[y2][x1]
					v = (sumSqr - (sum*sum)/float64(N)) / float64(N)
					if v > 0 {
						s = math.Sqrt(v)
						m = sum / float64(N)
						outValue = ((z - k) - m) / s
						rout.SetValue(row, col, outValue)
					} else {
						rout.SetValue(row, col, 0)
					}
				} else {
					rout.SetValue(row, col, 0)
				}
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress%5 == 0 && progress != oldProgress {
			printf("\rPerforming analysis (2 of 2): %v%%\n", progress)
			oldProgress = progress
		}
	}

	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by DeviationFromMean tool"))
	rout.AddMetadataEntry(fmt.Sprintf("Window size: %v", (this.neighbourhoodSize*2 + 1)))
	config.DisplayMinimum = -2.58
	config.DisplayMaximum = 2.58
	rout.SetRasterConfig(config)
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
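DeviationFromMean owes its speed to the three integral images I, I2 and IN: the sum, sum of squares and valid-cell count of any rectangular window come from four corner lookups instead of a full scan. The sketch below repeats the same summed-area-table trick on a plain 3x3 grid (hypothetical data, no nodata handling); the corner convention matches the tool's, so row y1 and column x1 are excluded from the window.
package main

import (
	"fmt"
	"math"
)

func main() {
	data := [][]float64{
		{1, 2, 3},
		{4, 5, 6},
		{7, 8, 9},
	}
	rows, cols := len(data), len(data[0])

	// Build the integral images as the tool does: I holds running sums,
	// I2 running sums of squares.
	I := make([][]float64, rows)
	I2 := make([][]float64, rows)
	for r := 0; r < rows; r++ {
		I[r] = make([]float64, cols)
		I2[r] = make([]float64, cols)
		sum, sumSqr := 0.0, 0.0
		for c := 0; c < cols; c++ {
			z := data[r][c]
			sum += z
			sumSqr += z * z
			if r > 0 {
				I[r][c] = sum + I[r-1][c]
				I2[r][c] = sumSqr + I2[r-1][c]
			} else {
				I[r][c] = sum
				I2[r][c] = sumSqr
			}
		}
	}

	// Window statistics from four lookups, with the same corners as the tool;
	// here the window covers rows 1..2 and columns 1..2, i.e. {5, 6, 8, 9}.
	y1, y2, x1, x2 := 0, 2, 0, 2
	n := float64((y2 - y1) * (x2 - x1))
	sum := I[y2][x2] + I[y1][x1] - I[y1][x2] - I[y2][x1]
	sumSqr := I2[y2][x2] + I2[y1][x1] - I2[y1][x2] - I2[y2][x1]
	mean := sum / n
	variance := (sumSqr - sum*sum/n) / n
	fmt.Println(mean, math.Sqrt(variance)) // mean 7, std-dev about 1.58
}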
Example #7
func (this *FillSmallNodataHoles) Run() {
	start1 := time.Now()

	var progress, oldProgress, col, row int
	var z, zN1, zN2 float64

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
		return
	}

	start2 := time.Now()

	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()

	// create the output raster
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = inConfig.PreferredPalette
	config.DataType = inConfig.DataType
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	config.DisplayMinimum = inConfig.DisplayMinimum
	config.DisplayMaximum = inConfig.DisplayMaximum
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	printf("\r                                                           ")

	oldProgress = -1
	for row = 1; row < rows-1; row++ {
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z == nodata {
				zN1 = rin.Value(row-1, col)
				zN2 = rin.Value(row+1, col)
				if zN1 != nodata && zN2 != nodata {
					rout.SetValue(row, col, (zN1+zN2)/2.0)
				}
			} else {
				rout.SetValue(row, col, z)
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rProgress (Loop 1 of 2): %v%%", progress)
			oldProgress = progress
		}
	}

	oldProgress = -1
	for row = 0; row < rows; row++ {
		for col = 1; col < columns-1; col++ {
			z = rout.Value(row, col)
			if z == nodata {
				zN1 = rout.Value(row, col-1)
				zN2 = rout.Value(row, col+1)
				if zN1 != nodata && zN2 != nodata {
					rout.SetValue(row, col, (zN1+zN2)/2.0)
				}
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rProgress (Loop 2 of 2): %v%%", progress)
			oldProgress = progress
		}
	}

	printf("\r                                                           ")
	printf("\rSaving data...\n")

	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by FillSmallNodataHoles"))
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
func (this *DeviationFromMeanTraditional) Run() {
	start1 := time.Now()

	var progress, oldProgress, col, row int
	var z, zN, sum, sumSqr float64
	var n int
	var x1, x2, y1, y2 int
	var i, j int
	var outValue, v, s, m float64

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
		return
	}
	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()
	minValue := rin.GetMinimumValue()
	maxValue := rin.GetMaximumValue()
	valueRange := maxValue - minValue
	k := minValue + valueRange/2.0

	start2 := time.Now()

	// output the data
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = "blue_white_red.plt"
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	printf("\rPerforming analysis: %v%%\n", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		y1 = row - this.neighbourhoodSize
		y2 = row + this.neighbourhoodSize
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z != nodata {
				x1 = col - this.neighbourhoodSize
				x2 = col + this.neighbourhoodSize

				n = 0
				sum = 0.0
				sumSqr = 0.0
				for i = y1; i <= y2; i++ {
					for j = x1; j <= x2; j++ {
						zN = rin.Value(i, j)
						if zN != nodata {
							n++
							sum += zN - k
							sumSqr += (zN - k) * (zN - k)
						}
					}
				}

				if n > 2 {
					v = (sumSqr - (sum*sum)/float64(n)) / float64(n) // one-pass variance, i.e. sumSqr/n - mean*mean (eq. from Annabel Ness Evans, p. 92)
					if v > 0 {
						s = math.Sqrt(v)
						m = sum / float64(n)
						outValue = ((z - k) - m) / s
						rout.SetValue(row, col, outValue)
					} else {
						rout.SetValue(row, col, 0.0)
					}

				} else {
					rout.SetValue(row, col, 0.0)
				}

				//				N = IN[y2][x2] + IN[y1][x1] - IN[y1][x2] - IN[y2][x1]
				//				if N > 0 {
				//					sum = I[y2][x2] + I[y1][x1] - I[y1][x2] - I[y2][x1]
				//					sumSqr = I2[y2][x2] + I2[y1][x1] - I2[y1][x2] - I2[y2][x1]
				//					v = (sumSqr - (sum*sum)/float64(N)) / float64(N)
				//					if v > 0 {
				//						s = math.Sqrt(v)
				//						m = sum / float64(N)
				//						outValue = ((z - k) - m) / s
				//						rout.SetValue(row, col, outValue)
				//					} else {
				//						rout.SetValue(row, col, 0)
				//					}
				//				} else {
				//					rout.SetValue(row, col, 0)
				//				}
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress%5 == 0 && progress != oldProgress {
			printf("\rPerforming analysis: %v%%\n", progress)
			oldProgress = progress
		}
	}

	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by DeviationFromMeanTraditional tool"))
	rout.AddMetadataEntry(fmt.Sprintf("Window size: %v", (this.neighbourhoodSize*2 + 1)))
	config.DisplayMinimum = -2.58
	config.DisplayMaximum = 2.58
	rout.SetRasterConfig(config)
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
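DeviationFromMeanTraditional rescans the full window for every cell, so its cost grows with the square of the window size, whereas the integral-image version above does constant work per cell. Both rely on the same one-pass shortcut, v = (sumSqr - sum*sum/n) / n; a tiny standalone check with made-up numbers:
package main

import (
	"fmt"
	"math"
)

func main() {
	// Hypothetical window values, already offset by k as in the tool.
	vals := []float64{2, 4, 4, 4, 5, 5, 7, 9}

	var sum, sumSqr float64
	for _, x := range vals {
		sum += x
		sumSqr += x * x
	}
	n := float64(len(vals))

	// Same shortcut as the tool: population variance from the two running sums.
	v := (sumSqr - sum*sum/n) / n
	m := sum / n
	fmt.Println(m, math.Sqrt(v)) // 5 and 2 for this classic example
}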
/* This function is only used to benchmark the BreachDepressions tool.
   It can be called by running the tool in 'benchon' mode. The tool is run
   10 times and elapsed times do not include disk I/O. No output file
   is created.
*/
func benchmarkBreachDepressions(parent *BreachDepressions) {
	println("Benchmarking BreachDepressions...")

	var progress, oldProgress, col, row, i, n int
	var colN, rowN, r, c, flatindex int
	var dir byte
	needsFilling := false
	var z, zN, lowestNeighbour float64
	var zTest, zN2 float64
	var gc gridCell
	var p int64
	var breachDepth, maxPathBreachDepth float64
	var numCellsInPath int32
	var isPit, isEdgeCell bool
	var isActive bool
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
	backLink := [8]byte{5, 6, 7, 8, 1, 2, 3, 4}
	//outPointer := [9]float64{0, 1, 2, 4, 8, 16, 32, 64, 128}
	maxLengthOrDepthUsed := false
	if parent.maxDepth > 0 || parent.maxLength > 0 {
		maxLengthOrDepthUsed = true
	}
	if maxLengthOrDepthUsed && parent.maxDepth == -1 {
		parent.maxDepth = math.MaxFloat64
	}
	if maxLengthOrDepthUsed && parent.maxLength == -1 {
		parent.maxLength = math.MaxInt32
	}
	performConstrainedBreaching := parent.constrainedBreaching
	if !maxLengthOrDepthUsed && performConstrainedBreaching {
		performConstrainedBreaching = false
	}
	println("Reading DEM data...")
	dem, err := raster.CreateRasterFromFile(parent.inputFile)
	if err != nil {
		println(err.Error())
		return
	}
	rows := dem.Rows
	columns := dem.Columns
	rowsLessOne := rows - 1
	numCellsTotal := rows * columns
	nodata := dem.NoDataValue
	minVal := dem.GetMinimumValue()
	elevDigits := len(strconv.Itoa(int(dem.GetMaximumValue() - minVal)))
	elevMultiplier := math.Pow(10, float64(8-elevDigits))
	SMALL_NUM := 1 / elevMultiplier
	POS_INF := math.Inf(1)

	println("The tool will now be run 10 times...")
	var benchTimes [10]time.Duration
	for bt := 0; bt < 10; bt++ {

		println("Run", (bt + 1), "...")

		startTime := time.Now()

		numSolvedCells := 0
		numPits := 0
		numPitsSolved := 0
		numValidCells := 0

		output := make([][]float64, rows+2)
		pits := make([][]bool, rows+2)
		inQueue := make([][]bool, rows+2)
		flowdir := make([][]byte, rows+2)

		for i = 0; i < rows+2; i++ {
			output[i] = make([]float64, columns+2)
			pits[i] = make([]bool, columns+2)
			inQueue[i] = make([]bool, columns+2)
			flowdir[i] = make([]byte, columns+2)
		}

		//		output := structures.Create2dFloat64Array(rows+2, columns+2)
		//		pits := structures.Create2dBoolArray(rows+2, columns+2)
		//		inQueue := structures.Create2dBoolArray(rows+2, columns+2)
		//		flowdir := structures.Create2dByteArray(rows+2, columns+2)

		pq := NewPQueue()
		//floodorder := NewQueue()
		var floodorder []int
		//floodorder := make([]int, numCellsTotal)
		floodOrderTail := 0

		// find the pit cells and initialize the grids
		printf("\rBreaching DEM (1 of 2): %v%%", 0)
		oldProgress = 0
		for row = 0; row < rows; row++ {
			for col = 0; col < columns; col++ {
				z = dem.Value(row, col) // input[row+1][col+1]
				output[row+1][col+1] = z
				flowdir[row+1][col+1] = 0
				if z != nodata {
					isPit = true
					isEdgeCell = false
					lowestNeighbour = POS_INF
					for n = 0; n < 8; n++ {
						zN = dem.Value(row+dY[n], col+dX[n]) //input[row+dY[n]+1][col+dX[n]+1]
						if zN != nodata && zN < z {
							isPit = false
							break
						} else if zN == nodata {
							isEdgeCell = true
						} else {
							if zN < lowestNeighbour {
								lowestNeighbour = zN
							}
						}
					}
					if isEdgeCell {
						gc = newGridCell(row+1, col+1, 0)
						p = int64(int64(z*elevMultiplier) * 100000)
						pq.Push(gc, p)
						inQueue[row+1][col+1] = true
					}
					if isPit {
						if !isEdgeCell {
							pits[row+1][col+1] = true
							numPits++
						}
						/* raising a pit cell to just lower than the
						 *  elevation of its lowest neighbour will
						 *  reduce the length and depth of the trench
						 *  that is necessary to eliminate the pit
						 *  by quite a bit on average.
						 */
						if lowestNeighbour != POS_INF {
							output[row+1][col+1] = lowestNeighbour - SMALL_NUM
						}
						//}
					}
					numValidCells++
				} else {
					numSolvedCells++
				}
			}
			progress = int(100.0 * row / rowsLessOne)
			if progress != oldProgress {
				printf("\rBreaching DEM (1 of 2): %v%%", progress)
				oldProgress = progress
			}
		}

		for row = 0; row < rows+2; row++ {
			output[row][0] = nodata
			output[row][columns+1] = nodata
			flowdir[row][0] = 0
			flowdir[row][columns+1] = 0
		}

		for col = 0; col < columns+2; col++ {
			output[0][col] = nodata
			output[rows+1][col] = nodata
			flowdir[0][col] = 0
			flowdir[rows+1][col] = 0
		}

		// now breach
		printf("\r                                                                 ")
		oldProgress = int(100.0 * numSolvedCells / numCellsTotal)
		printf("\rBreaching DEM (2 of 2): %v%%", oldProgress)

		if !maxLengthOrDepthUsed {
			// Perform a complete breaching solution; there will be no subsequent filling
			for numPitsSolved < numPits {
				gc = pq.Pop()
				row = gc.row
				col = gc.column
				flatindex = gc.flatIndex
				for i = 0; i < 8; i++ {
					rowN = row + dY[i]
					colN = col + dX[i]
					zN = output[rowN][colN]
					if zN != nodata && !inQueue[rowN][colN] {
						flowdir[rowN][colN] = backLink[i]
						if pits[rowN][colN] {
							numPitsSolved++
							// trace the flowpath back until you find a lower cell
							zTest = zN
							r = rowN
							c = colN
							isActive = true
							for isActive {
								zTest -= SMALL_NUM // ensures a small increment slope
								dir = flowdir[r][c]
								if dir > 0 {
									r += dY[dir-1]
									c += dX[dir-1]
									zN2 = output[r][c]
									if zN2 <= zTest || zN2 == nodata {
										// a lower grid cell or edge has been found
										isActive = false
									} else {
										output[r][c] = zTest
									}
								} else {
									// a pit has been located, likely at the edge
									isActive = false
								}
							}
						}
						numSolvedCells++
						n = 0
						if pits[rowN][colN] {
							n = flatindex + 1
						}
						gc = newGridCell(rowN, colN, n)
						p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
						pq.Push(gc, p)
						inQueue[rowN][colN] = true
					}
				}
				progress = int(100.0 * numSolvedCells / numCellsTotal)
				if progress != oldProgress {
					printf("\rBreaching DEM (2 of 2): %v%%", progress)
					oldProgress = progress
				}
			}
		} else if !performConstrainedBreaching {
			// Perform selective breaching. Sinks that can be removed within the
			// specified constraints of the max breach length and depth will
			// be breached. Otherwise they will be removed during a subsequent
			// filling operation.
			floodorder = make([]int, numValidCells)
			for pq.Len() > 0 {
				gc = pq.Pop()
				row = gc.row
				col = gc.column
				if parent.postBreachFilling {
					//floodorder.Push(row, col)
					floodorder[floodOrderTail] = row*columns + col
					floodOrderTail++
				}
				flatindex = gc.flatIndex
				for i = 0; i < 8; i++ {
					rowN = row + dY[i]
					colN = col + dX[i]
					zN = output[rowN][colN]
					if zN != nodata && !inQueue[rowN][colN] {
						flowdir[rowN][colN] = backLink[i]
						if pits[rowN][colN] {
							numPitsSolved++
							// trace the flowpath back until you find a lower cell
							// or a constraint is encountered
							numCellsInPath = 0
							maxPathBreachDepth = 0

							zTest = zN
							r = rowN
							c = colN
							isActive = true
							for isActive {
								zTest -= SMALL_NUM // ensures a small increment slope
								dir = flowdir[r][c]
								if dir > 0 {
									r += dY[dir-1]
									c += dX[dir-1]
									zN2 = output[r][c]
									if zN2 <= zTest || zN2 == nodata {
										// a lower grid cell has been found
										isActive = false
									} else {
										breachDepth = dem.Value(r-1, c-1) - zTest //input[r][c] - zTest
										if breachDepth > maxPathBreachDepth {
											maxPathBreachDepth = breachDepth
										}
									}
								} else {
									isActive = false
								}
								numCellsInPath++
								if numCellsInPath > parent.maxLength {
									isActive = false
								}
								if maxPathBreachDepth > parent.maxDepth {
									isActive = false
								}
							}

							if numCellsInPath <= parent.maxLength && maxPathBreachDepth <= parent.maxDepth {
								// breach it completely
								zTest = zN
								r = rowN
								c = colN
								isActive = true
								for isActive {
									zTest -= SMALL_NUM // ensures a small increment slope
									dir = flowdir[r][c]
									if dir > 0 {
										r += dY[dir-1]
										c += dX[dir-1]
										zN2 = output[r][c]
										if zN2 <= zTest || zN2 == nodata {
											// a lower grid cell has been found
											isActive = false
										} else {
											output[r][c] = zTest
										}
									} else {
										isActive = false
									}
								}
							} else {
								// it will be removed by filling in the next step.
								needsFilling = true
							}
						}
						numSolvedCells++
						n = 0
						if pits[rowN][colN] {
							n = flatindex + 1
						}
						gc = newGridCell(rowN, colN, n)
						p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
						pq.Push(gc, p)
						inQueue[rowN][colN] = true
					}
				}
				progress = int(100.0 * numSolvedCells / numCellsTotal)
				if progress != oldProgress {
					printf("\rBreaching DEM (2 of 2): %v%%", progress)
					oldProgress = progress
				}
			}
		} else {
			floodorder = make([]int, numValidCells)
			// perform constrained breaching
			var outletHeight float64
			var outletDist, targetDist, j int32
			var zOrig float64
			for pq.Len() > 0 {
				//item := heap.Pop(&pq).(*Item)
				//gc = item.value
				gc = pq.Pop()
				row = gc.row
				col = gc.column
				if parent.postBreachFilling {
					//floodorder.Push(row, col)
					floodorder[floodOrderTail] = row*columns + col
					floodOrderTail++
				}
				flatindex = gc.flatIndex
				//z = output[row][col]
				for i = 0; i < 8; i++ {
					rowN = row + dY[i]
					colN = col + dX[i]
					zN = output[rowN][colN]
					if zN != nodata && !inQueue[rowN][colN] {
						flowdir[rowN][colN] = backLink[i]
						if pits[rowN][colN] {
							numPitsSolved++
							// trace the flowpath back until you find a lower cell
							// or a constraint is encountered
							numCellsInPath = 0
							maxPathBreachDepth = 0

							zTest = zN
							r = rowN
							c = colN
							outletHeight = -math.MaxFloat64
							outletDist = 0
							isActive = true
							for isActive {
								zTest -= SMALL_NUM // ensures a small increment slope
								dir = flowdir[r][c]
								if dir > 0 {
									r += dY[dir-1]
									c += dX[dir-1]
									zN2 = output[r][c]
									if zN2 <= zTest || zN2 == nodata {
										// a lower grid cell has been found
										isActive = false
									} else {
										zOrig = dem.Value(r-1, c-1) //input[r][c]
										breachDepth = zOrig - zTest
										if breachDepth > maxPathBreachDepth {
											maxPathBreachDepth = breachDepth
										}
										if zOrig > outletHeight {
											outletHeight = zOrig
											outletDist = numCellsInPath
										}
									}
								} else {
									isActive = false
								}
								numCellsInPath++
							}

							if numCellsInPath <= parent.maxLength && maxPathBreachDepth <= parent.maxDepth {
								// breach it completely
								zTest = zN
								r = rowN
								c = colN
								isActive = true
								for isActive {
									zTest -= SMALL_NUM // ensures a small increment slope
									dir = flowdir[r][c]
									if dir > 0 {
										r += dY[dir-1]
										c += dX[dir-1]
										zN2 = output[r][c]
										if zN2 <= zTest || zN2 == nodata {
											// a lower grid cell has been found
											isActive = false
										} else {
											output[r][c] = zTest
										}
									} else {
										isActive = false
									}
								}
							} else {
								// ***Constrained Breaching***
								// it will be completely removed by filling in the next step...
								needsFilling = true
								// but in the meantime, lower the outlet as much as you can.

								zTest = outletHeight - parent.maxDepth
								targetDist = numCellsInPath

								if numCellsInPath > parent.maxLength {
									if outletDist < parent.maxLength/2 {
										targetDist = parent.maxLength
									} else {
										targetDist = outletDist + parent.maxLength/2
									}
									r = rowN
									c = colN
									for j = 0; j < targetDist; j++ {
										dir = flowdir[r][c]
										if dir > 0 {
											r += dY[dir-1]
											c += dX[dir-1]
											zTest = output[r][c]
										} else {
											break
										}
									}
									if outletHeight-zTest > parent.maxDepth {
										zTest = outletHeight - parent.maxDepth
									}
								}

								r = rowN
								c = colN
								isActive = true
								numCellsInPath = 0
								for isActive {
									dir = flowdir[r][c]
									if dir > 0 {
										r += dY[dir-1]
										c += dX[dir-1]
										zN2 = output[r][c]
										if zN2 <= zN || zN2 == nodata {
											// a lower grid cell has been found
											isActive = false
										} else {
											if output[r][c] > zTest {
												output[r][c] = zTest
											}
										}
									} else {
										isActive = false
									}
									numCellsInPath++
									if numCellsInPath > targetDist {
										isActive = false
									}
								}
							}
						}
						numSolvedCells++
						n = 0
						if pits[rowN][colN] {
							n = flatindex + 1
						}
						gc = newGridCell(rowN, colN, n)
						p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
						pq.Push(gc, p)
						inQueue[rowN][colN] = true
					}
				}
				progress = int(100.0 * numSolvedCells / numCellsTotal)
				if progress != oldProgress {
					printf("\rBreaching DEM (2 of 2): %v%%", progress)
					oldProgress = progress
				}
			}
		}

		pits = nil
		inQueue = nil

		if parent.postBreachFilling && needsFilling {
			// Fill the DEM.
			printf("\r                                                                    ")

			numSolvedCells = 0
			//for numSolvedCells < numCellsTotal {
			//	row, col = floodorder.Pop()
			for c := 0; c < numValidCells; c++ {
				row = floodorder[c] / columns
				col = floodorder[c] % columns
				if row >= 0 && col >= 0 {
					z = output[row][col]
					dir = flowdir[row][col]
					if dir > 0 {
						rowN = row + dY[dir-1]
						colN = col + dX[dir-1]
						zN = output[rowN][colN]
						if zN != nodata {
							if z <= zN+SMALL_NUM {
								output[row][col] = zN + SMALL_NUM
							}
						}
					}
				}
				numSolvedCells++
				progress = int(100.0 * numSolvedCells / numValidCells)
				if progress != oldProgress {
					printf("\rFilling DEM: %v%%", progress)
					oldProgress = progress
				}
			}
		}

		benchTimes[bt] = time.Since(startTime)
		printf("     Elapsed time (s): %v\n", benchTimes[bt].Seconds())
	}
	println("")
	println("Elapsed times (in sec.) of the 10 runs:")
	avgVal := 0.0
	for i := 0; i < 10; i++ {
		println(benchTimes[i].Seconds())
		avgVal += benchTimes[i].Seconds()
	}

	println("Average Time: ", avgVal/10.0)

	println("Operation complete!")

}
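The priority pushed with each cell above packs two values into one int64: the elevation, scaled by elevMultiplier so that roughly eight significant digits survive the integer cast, in the high digits, and the flat index (the order in which cells were queued across a flat) modulo 100000 in the low digits. Assuming NewPQueue pops the smallest priority first, which is what a priority-flood pass needs, lower cells come off the queue before higher ones, and within a flat the earlier-queued cells come off first. A minimal, self-contained sketch of the same packing (packPriority is an invented helper, not part of the library):

package main

import (
	"fmt"
	"math"
	"strconv"
)

// packPriority mirrors the p = int64(int64(z*elevMultiplier)*100000 + n%100000)
// expression used when cells are pushed onto the priority queue above.
func packPriority(z, elevMultiplier float64, flatIndex int) int64 {
	return int64(z*elevMultiplier)*100000 + int64(flatIndex)%100000
}

func main() {
	// Hypothetical DEM range: 100.0 m to 1050.0 m.
	minVal, maxVal := 100.0, 1050.0
	elevDigits := len(strconv.Itoa(int(maxVal - minVal))) // "950" -> 3 digits
	elevMultiplier := math.Pow(10, float64(8-elevDigits)) // 1e5

	// A lower elevation always yields a smaller priority...
	fmt.Println(packPriority(215.4, elevMultiplier, 0) < packPriority(215.5, elevMultiplier, 0)) // true
	// ...and within a flat, the cell queued earlier wins the tie.
	fmt.Println(packPriority(215.4, elevMultiplier, 3) < packPriority(215.4, elevMultiplier, 7)) // true
}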
Example #10
func (this *FillDepressions) Run() {

	if this.toolManager.BenchMode {
		benchmarkFillDepressions(this)
		return
	}

	start1 := time.Now()

	var progress, oldProgress, col, row, i, n int
	var colN, rowN, flatindex int
	numSolvedCells := 0
	var z, zN float64
	var gc gridCell
	var p int64
	var isEdgeCell bool
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}

	println("Reading DEM data...")
	dem, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}
	rows := dem.Rows
	columns := dem.Columns
	rowsLessOne := rows - 1
	numCellsTotal := rows * columns
	nodata := dem.NoDataValue
	demConfig := dem.GetRasterConfig()
	paletteName := demConfig.PreferredPalette

	// output the data
	// make a copy of the dem's raster configuration
	//config := dem.GetRasterConfig()
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = paletteName
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	displayMin := demConfig.DisplayMinimum
	displayMax := demConfig.DisplayMaximum
	config.DisplayMinimum = displayMin
	config.DisplayMaximum = displayMax
	config.CoordinateRefSystemWKT = demConfig.CoordinateRefSystemWKT
	config.EPSGCode = demConfig.EPSGCode
	value := fmt.Sprintf("Created on %s\n", time.Now().Local())
	config.MetadataEntries = append(config.MetadataEntries, value)
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		dem.North, dem.South, dem.East, dem.West, config)
	if err != nil {
		panic("Failed to write raster")
	}

	minVal := dem.GetMinimumValue()
	elevDigits := len(strconv.Itoa(int(dem.GetMaximumValue() - minVal)))
	elevMultiplier := math.Pow(10, float64(8-elevDigits))
	SMALL_NUM := 1 / elevMultiplier
	if !this.fixFlats {
		SMALL_NUM = 0
	}

	start2 := time.Now()

	// Fill the DEM.
	inQueue := make([][]bool, rows+2)

	for i = 0; i < rows+2; i++ {
		inQueue[i] = make([]bool, columns+2)
	}
	//inQueue := structures.Create2dBoolArray(rows+2, columns+2)

	// Reinitialize the priority queue and flow direction grid.
	numSolvedCells = 0

	//pq := make(PriorityQueue, 0)
	pq := NewPQueue()

	// find the pit cells and initialize the grids
	printf("\r                                                      ")
	printf("\rFilling DEM (1 of 2): %v%%", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = dem.Value(row, col)
			if z != nodata {
				//isPit = true
				isEdgeCell = false
				for n = 0; n < 8; n++ {
					zN = dem.Value(row+dY[n], col+dX[n])
					if zN == nodata {
						isEdgeCell = true
					} // else if zN < z {
					//	isPit = false
					//}
				}

				if isEdgeCell { //}&& isPit {
					gc = newGridCell(row, col, 0)
					p = int64(int64(z*elevMultiplier) * 100000) // seed priority is based on the cell's own elevation
					//					item := &Item{
					//						value:    gc,
					//						priority: p,
					//					}
					//					heap.Push(&pq, item)
					pq.Push(gc, p)
					inQueue[row+1][col+1] = true
					rout.SetValue(row, col, z)
					numSolvedCells++
				}
			} else {
				numSolvedCells++
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rFilling DEM (1 of 2): %v%%", progress)
			oldProgress = progress
		}
	}

	//heap.Init(&pq)
	printf("\r                                                      ")
	oldProgress = -1
	for numSolvedCells < numCellsTotal { //pq.Len() > 0 {
		//item := heap.Pop(&pq).(*Item)
		//gc = item.value
		gc = pq.Pop()
		row = gc.row
		col = gc.column
		flatindex = gc.flatIndex
		z = rout.Value(row, col)
		for i = 0; i < 8; i++ {
			rowN = row + dY[i]
			colN = col + dX[i]
			zN = dem.Value(rowN, colN)
			if zN != nodata && !inQueue[rowN+1][colN+1] {
				n = 0
				if zN <= z {
					zN = z + SMALL_NUM
					n = flatindex + 1
				}
				numSolvedCells++
				rout.SetValue(rowN, colN, zN)
				gc = newGridCell(rowN, colN, n)
				p = int64(int64(zN*elevMultiplier)*100000 + (int64(n) % 100000))
				//				item = &Item{
				//					value:    gc,
				//					priority: p,
				//				}
				//				heap.Push(&pq, item)
				pq.Push(gc, p)
				inQueue[rowN+1][colN+1] = true
			}
		}
		progress = int(100.0 * numSolvedCells / numCellsTotal)
		if progress != oldProgress {
			printf("\rFilling DEM (2 of 2): %v%%", progress)
			oldProgress = progress
		}
	}

	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by FillDepressions tool"))
	config.DisplayMinimum = displayMin
	config.DisplayMaximum = displayMax
	rout.SetRasterConfig(config)
	rout.Save()

	println("\nOperation complete!")

	value = fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
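FillDepressions above is a priority-flood fill: cells on the data edge (those with a NoData neighbour) seed a priority queue, the lowest cell is popped, and every unvisited neighbour that is not higher than it is raised to z + SMALL_NUM (with its flat index incremented) before being queued itself, which leaves filled areas with a tiny gradient toward their outlet. A one-dimensional, self-contained sketch of that idea, using only the standard library; container/heap stands in for NewPQueue and the flat-index tie-breaking is omitted:

package main

import (
	"container/heap"
	"fmt"
)

// cell pairs a grid index with its (possibly raised) elevation.
type cell struct {
	idx int
	z   float64
}

// minHeap pops the lowest elevation first, like the priority queue above.
type minHeap []cell

func (h minHeap) Len() int            { return len(h) }
func (h minHeap) Less(i, j int) bool  { return h[i].z < h[j].z }
func (h minHeap) Swap(i, j int)       { h[i], h[j] = h[j], h[i] }
func (h *minHeap) Push(x interface{}) { *h = append(*h, x.(cell)) }
func (h *minHeap) Pop() interface{} {
	old := *h
	n := len(old)
	c := old[n-1]
	*h = old[:n-1]
	return c
}

// fill1D is a one-dimensional analogue of the two-pass fill: seed the queue
// with the edge cells, then repeatedly pop the lowest cell and raise any
// unvisited neighbour that is not higher than it to z + smallNum.
func fill1D(dem []float64, smallNum float64) []float64 {
	out := append([]float64(nil), dem...)
	visited := make([]bool, len(dem))
	h := &minHeap{}
	for _, i := range []int{0, len(dem) - 1} {
		heap.Push(h, cell{i, out[i]})
		visited[i] = true
	}
	for h.Len() > 0 {
		c := heap.Pop(h).(cell)
		for _, n := range []int{c.idx - 1, c.idx + 1} {
			if n < 0 || n >= len(dem) || visited[n] {
				continue
			}
			if out[n] <= out[c.idx] {
				out[n] = out[c.idx] + smallNum
			}
			visited[n] = true
			heap.Push(h, cell{n, out[n]})
		}
	}
	return out
}

func main() {
	// The depression at indices 2-4 is raised to just above its pour point
	// (the 8 m cell at index 5), sloping gently toward the right-hand edge.
	fmt.Println(fill1D([]float64{10, 9, 4, 3, 4, 8, 7}, 0.001))
}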
Example #11
func (this *Hillshade) Run() {
	start1 := time.Now()

	var progress, oldProgress int

	azimuth := (315.0 - 90.0) * DegToRad
	altitude := 30.0 * DegToRad
	sinTheta := math.Sin(altitude)
	cosTheta := math.Cos(altitude)

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}

	start2 := time.Now()

	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()
	gridRes := (rin.GetCellSizeX() + rin.GetCellSizeY()) / 2.0
	eightGridRes := 8 * gridRes
	const radToDeg float64 = 180.0 / math.Pi

	// create the output raster
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = "grey.pal"
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	zConvFactor := 1.0
	if rin.IsInGeographicCoordinates() {
		// calculate a new z-conversion factor
		midLat := (rin.North - rin.South) / 2.0
		if midLat <= 90 && midLat >= -90 {
			zConvFactor = 1.0 / (113200 * math.Cos(math.Pi/180.0*midLat))
		}
	}

	numCPUs := runtime.NumCPU()
	c1 := make(chan [256]int)
	c2 := make(chan int)
	runtime.GOMAXPROCS(numCPUs)
	var wg sync.WaitGroup

	// calculate hillshade
	printf("\r                                                    ")
	printf("\rProgress: %v%%", 0)
	startingRow := 0
	var rowBlockSize int = rows / numCPUs

	histo := [256]int{}
	numCells := 0

	k := 0
	for startingRow < rows {
		endingRow := startingRow + rowBlockSize
		if endingRow >= rows {
			endingRow = rows - 1
		}
		wg.Add(1)
		go func(rowSt, rowEnd, k int) {
			defer wg.Done()
			var z, zN, fy, fx, value, tanSlope, aspect, term1, term2, term3 float64
			dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
			dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
			N := [8]float64{}
			for row := rowSt; row <= rowEnd; row++ {
				rowHisto := [256]int{}
				rowNumCells := 0
				floatData := make([]float64, columns)
				for col := 0; col < columns; col++ {
					z = rin.Value(row, col)
					if z != nodata {
						z *= zConvFactor
						for n := 0; n < 8; n++ {
							zN = rin.Value(row+dY[n], col+dX[n])
							if zN != nodata {
								N[n] = zN * zConvFactor
							} else {
								N[n] = z
							}
						}

						fy = (N[6] - N[4] + 2*(N[7]-N[3]) + N[0] - N[2]) / eightGridRes
						fx = (N[2] - N[4] + 2*(N[1]-N[5]) + N[0] - N[6]) / eightGridRes

						if fx != 0 {
							tanSlope = math.Sqrt(fx*fx + fy*fy)
							aspect = (180 - math.Atan(fy/fx)*RadToDeg + 90*(fx/math.Abs(fx))) * DegToRad
							term1 = tanSlope / math.Sqrt(1+tanSlope*tanSlope)
							term2 = sinTheta / tanSlope
							term3 = cosTheta * math.Sin(azimuth-aspect)
							z = term1 * (term2 - term3)
						} else {
							z = 0.5
						}

						value = math.Floor(z * 255)
						if value < 0 {
							value = 0
						}
						floatData[col] = value
						rowHisto[int(value)]++
						rowNumCells++
					} else {
						floatData[col] = nodata
					}
				}
				rout.SetRowValues(row, floatData)
				c1 <- rowHisto // row completed
				c2 <- rowNumCells
			}
		}(startingRow, endingRow, k)
		startingRow = endingRow + 1
		k++
	}

	//rowHisto := [256]int64{}
	oldProgress = 0
	for rowsCompleted := 0; rowsCompleted < rows; rowsCompleted++ {
		rowHisto := <-c1 // a row has successfully completed
		for i := 0; i < 256; i++ {
			histo[i] += rowHisto[i]
		}
		rowNumCells := <-c2
		numCells += rowNumCells
		progress = int(100.0 * float64(rowsCompleted) / float64(rowsLessOne))
		if progress != oldProgress {
			printf("\rProgress: %v%%", progress)
			oldProgress = progress
		}
	}

	wg.Wait()

	// trim the display min and max values by 1%
	newMin := 0.0
	newMax := 0.0
	targetCellNum := int(float64(numCells) * 0.01)
	sum := 0
	for i := 0; i < 256; i++ {
		sum += histo[i]
		if sum >= targetCellNum {
			newMin = float64(i)
			break
		}
	}

	sum = 0
	for i := 255; i >= 0; i-- {
		sum += histo[i]
		if sum >= targetCellNum {
			newMax = float64(i)
			break
		}
	}

	printf("\r                                                           ")
	printf("\rSaving data...\n")

	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)

	if newMax > newMin {
		rout.SetDisplayMinimum(newMin)
		rout.SetDisplayMaximum(newMax)
	}
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by Slope"))
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
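After the hillshade rows are computed, the display range is set by clipping 1% of the valid cells from each tail of the 256-bin brightness histogram: the cumulative count is walked up from bin 0 and down from bin 255 until it reaches the 1% target. A minimal sketch of that clipping step (trimDisplayRange is an invented name; the [256]int layout matches the histogram above):

package main

import "fmt"

// trimDisplayRange walks the brightness histogram in from both ends and
// returns the first bins at which the running count reaches 1% of the valid
// cells, mirroring the newMin/newMax calculation in the Hillshade code above.
func trimDisplayRange(histo [256]int, numCells int) (newMin, newMax float64) {
	target := int(float64(numCells) * 0.01)
	sum := 0
	for i := 0; i < 256; i++ {
		sum += histo[i]
		if sum >= target {
			newMin = float64(i)
			break
		}
	}
	sum = 0
	for i := 255; i >= 0; i-- {
		sum += histo[i]
		if sum >= target {
			newMax = float64(i)
			break
		}
	}
	return
}

func main() {
	// An invented, strongly peaked brightness distribution with a few outliers.
	var histo [256]int
	for i := 100; i < 200; i++ {
		histo[i] = 50
	}
	histo[0] = 20   // a few very dark cells
	histo[255] = 20 // a few very bright cells
	numCells := 100*50 + 40
	fmt.Println(trimDisplayRange(histo, numCells)) // 100 199
}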
func (this *MaximumElevationDeviation) Run() {
	start1 := time.Now()

	var progress, oldProgress, col, row int
	var z, sum, sumSqr float64
	var sumN, N int
	var x1, x2, y1, y2 int
	var outValue, v, s, m float64
	var str string

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}
	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()
	minValue := rin.GetMinimumValue()
	maxValue := rin.GetMaximumValue()
	valueRange := maxValue - minValue
	k := minValue + valueRange/2.0

	start2 := time.Now()

	I := make([][]float64, rows)
	I2 := make([][]float64, rows)
	IN := make([][]int, rows)
	maxVal := make([][]float64, rows)
	scaleVal := make([][]int, rows)
	zVal := make([][]float64, rows)

	for row = 0; row < rows; row++ {
		I[row] = make([]float64, columns)
		I2[row] = make([]float64, columns)
		IN[row] = make([]int, columns)
		maxVal[row] = make([]float64, columns)
		scaleVal[row] = make([]int, columns)
		zVal[row] = make([]float64, columns)
	}

	// calculate the integral image
	oldProgress = -1
	for row = 0; row < rows; row++ {
		sum = 0
		sumSqr = 0
		sumN = 0
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			zVal[row][col] = z
			if z == nodata {
				z = 0
			} else {
				z = z - k
				sumN++
			}
			sum += z
			sumSqr += z * z
			if row > 0 {
				I[row][col] = sum + I[row-1][col]
				I2[row][col] = sumSqr + I2[row-1][col]
				IN[row][col] = sumN + IN[row-1][col]
			} else {
				I[row][col] = sum
				I2[row][col] = sumSqr
				IN[row][col] = sumN
			}
			maxVal[row][col] = -math.MaxFloat32

		}
		progress = int(100.0 * row / rowsLessOne)
		if progress%5 == 0 && progress != oldProgress {
			printf("Calculating integral image: %v%%\n", progress)
			oldProgress = progress
		}
	}

	oldProgress = -1
	loopNum := 1
	numLoops := int((this.maxNeighbourhood-this.minNeighbourhood)/this.neighbourhoodStep) + 1
	for neighbourhood := this.minNeighbourhood; neighbourhood <= this.maxNeighbourhood; neighbourhood += this.neighbourhoodStep {
		for row = 0; row < rows; row++ {
			y1 = row - neighbourhood
			if y1 < 0 {
				y1 = 0
			}
			if y1 >= rows {
				y1 = rows - 1
			}

			y2 = row + neighbourhood
			if y2 < 0 {
				y2 = 0
			}
			if y2 >= rows {
				y2 = rows - 1
			}

			for col = 0; col < columns; col++ {
				z = zVal[row][col]
				if z != nodata {
					x1 = col - neighbourhood
					if x1 < 0 {
						x1 = 0
					}
					if x1 >= columns {
						x1 = columns - 1
					}

					x2 = col + neighbourhood
					if x2 < 0 {
						x2 = 0
					}
					if x2 >= columns {
						x2 = columns - 1
					}

					N = IN[y2][x2] + IN[y1][x1] - IN[y1][x2] - IN[y2][x1]
					if N > 0 {
						sum = I[y2][x2] + I[y1][x1] - I[y1][x2] - I[y2][x1]
						sumSqr = I2[y2][x2] + I2[y1][x1] - I2[y1][x2] - I2[y2][x1]
						v = (sumSqr - (sum*sum)/float64(N)) / float64(N)
						if v > 0 {
							s = math.Sqrt(v)
							m = sum / float64(N)
							outValue = ((z - k) - m) / s
							if math.Abs(outValue) > maxVal[row][col] {
								maxVal[row][col] = math.Abs(outValue)
								if outValue >= 0 {
									//output.setValue(row, col, neighbourhood)
									scaleVal[row][col] = neighbourhood
								} else {
									//output.setValue(row, col, -neighbourhood)
									scaleVal[row][col] = -neighbourhood
								}
								//output2.setValue(row, col, outValue)
							}
						}
					}
				}
			}
			progress = int(100.0 * row / rowsLessOne)
			if progress%5 == 0 && progress != oldProgress {
				str = fmt.Sprintf("Loop %v of %v", loopNum, numLoops)
				printf("%s: %v%%\n", str, progress)
				oldProgress = progress
			}
		}
		loopNum++
	}

	// output the data
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = "blue_white_red.plt"
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	rout1, err := raster.CreateNewRaster(this.magOutputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	config2 := raster.NewDefaultRasterConfig()
	config2.PreferredPalette = "blue_white_red.plt"
	config2.DataType = raster.DT_FLOAT32
	config2.NoDataValue = nodata
	config2.InitialValue = nodata
	config2.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config2.EPSGCode = inConfig.EPSGCode
	rout2, err := raster.CreateNewRaster(this.scaleOutputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config2)
	if err != nil {
		println("Failed to write raster")
		return
	}

	config.DisplayMinimum = -3.0
	config.DisplayMaximum = 3.0

	config2.PreferredPalette = "imhof1.plt"
	rout2.SetRasterConfig(config2)

	println("Saving the outputs...")
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			if maxVal[row][col] > -math.MaxFloat32 {
				if scaleVal[row][col] >= 0 {
					rout1.SetValue(row, col, maxVal[row][col])
					rout2.SetValue(row, col, float64(scaleVal[row][col]))
				} else {
					rout1.SetValue(row, col, -maxVal[row][col])
					rout2.SetValue(row, col, float64(-scaleVal[row][col]))
				}
			}
		}
	}

	rout1.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)
	rout1.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout1.AddMetadataEntry(fmt.Sprintf("Created by ElevationPercentile tool"))
	rout1.AddMetadataEntry(fmt.Sprintf("Min. window size: %v", (this.minNeighbourhood*2 + 1)))
	rout1.AddMetadataEntry(fmt.Sprintf("Max. window size: %v", (this.maxNeighbourhood*2 + 1)))
	rout1.AddMetadataEntry(fmt.Sprintf("Step size: %v", this.neighbourhoodStep))

	rout2.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	rout2.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout2.AddMetadataEntry(fmt.Sprintf("Created by ElevationPercentile tool"))
	rout2.AddMetadataEntry(fmt.Sprintf("Min. window size: %v", (this.minNeighbourhood*2 + 1)))
	rout2.AddMetadataEntry(fmt.Sprintf("Max. window size: %v", (this.maxNeighbourhood*2 + 1)))
	rout2.AddMetadataEntry(fmt.Sprintf("Step size: %v", this.neighbourhoodStep))

	overallTime := time.Since(start1)
	rout1.SetRasterConfig(config)
	rout1.Save()
	rout2.SetRasterConfig(config2)
	rout2.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
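MaximumElevationDeviation leans on three integral images, I (sum of z - k), I2 (sum of squares) and IN (count of valid cells), so the mean and standard deviation of any rectangular window come from four corner lookups per image instead of a scan of the window. A self-contained sketch of that constant-time retrieval; windowStats is an invented helper, the k offset used above to limit round-off is omitted, and, as in the code above, the window excludes row y1 and column x1:

package main

import (
	"fmt"
	"math"
)

// windowStats returns the mean, standard deviation and valid-cell count of a
// window using the same four-corner lookups as the loop above.
func windowStats(I, I2 [][]float64, IN [][]int, y1, x1, y2, x2 int) (mean, sd float64, n int) {
	n = IN[y2][x2] + IN[y1][x1] - IN[y1][x2] - IN[y2][x1]
	if n <= 0 {
		return 0, 0, 0
	}
	sum := I[y2][x2] + I[y1][x1] - I[y1][x2] - I[y2][x1]
	sumSqr := I2[y2][x2] + I2[y1][x1] - I2[y1][x2] - I2[y2][x1]
	mean = sum / float64(n)
	v := (sumSqr - sum*sum/float64(n)) / float64(n)
	if v > 0 {
		sd = math.Sqrt(v)
	}
	return
}

func main() {
	grid := [][]float64{
		{1, 2, 3},
		{4, 5, 6},
		{7, 8, 9},
	}
	rows, cols := len(grid), len(grid[0])
	I := make([][]float64, rows)
	I2 := make([][]float64, rows)
	IN := make([][]int, rows)
	for r := 0; r < rows; r++ {
		I[r] = make([]float64, cols)
		I2[r] = make([]float64, cols)
		IN[r] = make([]int, cols)
		sum, sumSqr, count := 0.0, 0.0, 0
		for c := 0; c < cols; c++ {
			sum += grid[r][c]
			sumSqr += grid[r][c] * grid[r][c]
			count++
			if r > 0 {
				I[r][c] = sum + I[r-1][c]
				I2[r][c] = sumSqr + I2[r-1][c]
				IN[r][c] = count + IN[r-1][c]
			} else {
				I[r][c], I2[r][c], IN[r][c] = sum, sumSqr, count
			}
		}
	}
	// Corners (0,0) and (2,2) select the cells {5, 6, 8, 9}.
	fmt.Println(windowStats(I, I2, IN, 0, 0, 2, 2)) // mean 7, sd ~1.58, n 4
}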
Example #13
func (this *D8FlowAccumulation) Run() {
	start1 := time.Now()

	var z, zN, slope, maxSlope float64
	var progress, oldProgress, col, row, r, c, i, n int
	var dir int8
	//var b int8
	dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
	dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
	//inflowingVals := [8]int8{5, 6, 7, 8, 1, 2, 3, 4}

	println("Reading DEM data...")
	dem, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}
	rows := dem.Rows
	columns := dem.Columns
	rowsLessOne := rows - 1
	nodata := dem.NoDataValue
	cellSizeX := dem.GetCellSizeX()
	cellSizeY := dem.GetCellSizeY()
	diagDist := math.Sqrt(cellSizeX*cellSizeX + cellSizeY*cellSizeY)
	dist := [8]float64{diagDist, cellSizeX, diagDist, cellSizeY, diagDist, cellSizeX, diagDist, cellSizeY}
	println("Calculating pointer grid...")
	flowdir := make([][]int8, rows+2)
	numInflowing := make([][]int8, rows+2)
	for i = 0; i < rows+2; i++ {
		flowdir[i] = make([]int8, columns+2)
		numInflowing[i] = make([]int8, columns+2)
	}

	// calculate flow directions
	printf("\r                                                    ")
	printf("\rLoop (1 of 3): %v%%", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = dem.Value(row, col)
			flowdir[row+1][col+1] = 0
			//			numInflowing[row+1][col+1] = 0
			if z != nodata {
				maxSlope = math.Inf(-1)
				for n = 0; n < 8; n++ {
					zN = dem.Value(row+dY[n], col+dX[n])
					if zN != nodata {
						slope = (z - zN) / dist[n]

						if slope > maxSlope {
							maxSlope = slope
							dir = int8(n) + 1
						}
					}
				}
				if maxSlope > 0 {
					flowdir[row+1][col+1] = dir

					// increment the number of inflowing cells for the downslope receiver
					c = col + dX[dir-1] + 1
					r = row + dY[dir-1] + 1
					numInflowing[r][c]++

				} else {
					flowdir[row+1][col+1] = 0
				}
			} else {
				numInflowing[row+1][col+1] = 0
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rLoop (1 of 3): %v%%", progress)
			oldProgress = progress
		}
	}

	//	 calculate the number of inflowing neighbours and initialize the flow queue
	//	 with cells with no inflowing neighbours
	fq := newFlowQueue()
	//fq := newQueue(rows * columns / 2)
	numSolvedCells := 0
	println("")
	println("Calculating the number of inflow neighbours...")
	printf("\r                                                    ")
	printf("\rLoop (2 of 3): %v%%", 0)
	oldProgress = 0
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = dem.Value(row, col)
			if z != nodata {
				if numInflowing[row+1][col+1] == 0 {
					fq.push(row, col)
				}
			} else {
				numSolvedCells++
			}

		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rLoop (2 of 3): %v%%", progress)
			oldProgress = progress
		}
	}

	// create the output file
	config := raster.NewDefaultRasterConfig() //dem.GetRasterConfig()
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = 1
	config.PreferredPalette = "blueyellow.pal"
	config.CoordinateRefSystemWKT = dem.GetRasterConfig().CoordinateRefSystemWKT
	config.EPSGCode = dem.GetRasterConfig().EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		dem.North, dem.South, dem.East, dem.West, config)
	if err != nil {
		panic("Failed to write raster")
	}

	// perform the flow accumulation
	println("")
	println("Performing the flow accumulation...")
	numCellsTotal := rows * columns
	oldProgress = -1
	for fq.count > 0 {
		row, col = fq.pop()
		z = rout.Value(row, col)
		//value to send to its neighbour
		//find its downslope neighbour
		dir = flowdir[row+1][col+1]
		if dir > 0 {
			col += dX[dir-1]
			row += dY[dir-1]
			r = row + 1
			c = col + 1
			//update the output grids
			zN = rout.Value(row, col)
			rout.SetValue(row, col, zN+z)
			numInflowing[r][c]--
			//see if you can progress further downslope
			if numInflowing[r][c] == 0 {
				//numInflowing[r][c] = -1
				fq.push(row, col)
			}
		}
		numSolvedCells++
		progress = int(100.0 * numSolvedCells / numCellsTotal)
		if progress != oldProgress {
			printf("\rLoop (3 of 3): %v%%", progress)
			oldProgress = progress
		}
	}

	//	// perform the flow accumulation
	//	println("")
	//	println("Performing the flow accumulation...")
	//	printf("\r                                                    ")
	//	printf("\rLoop (3 of 3): %v%%", 0)
	// var trace bool
	//	oldProgress = 0
	//	for row = 0; row < rows; row++ {
	//		for col = 0; col < columns; col++ {
	//			z = dem.Value(row, col)
	//			if z != nodata {
	//				r = row + 1
	//				c = col + 1
	//				if numInflowing[r][c] == 0 {
	//					numInflowing[r][c] = -1
	//					trace = true

	//					for trace {
	//						z = rout.Value(r-1, c-1)
	//						//value to send to it's neighbour
	//						//find it's downslope neighbour
	//						dir = flowdir[r][c]
	//						if dir > 0 {
	//							c += dX[dir-1]
	//							r += dY[dir-1]
	//							//update the output grids
	//							zN = rout.Value(r-1, c-1)
	//							rout.SetValue(r-1, c-1, zN+z)
	//							numInflowing[r][c]--
	//							//see if you can progress further downslope
	//							if numInflowing[r][c] == 0 {
	//								numInflowing[r][c] = -1
	//								trace = true
	//							} else {
	//								trace = false
	//							}
	//						} else {
	//							trace = false
	//						}
	//					}
	//				}
	//			} else {
	//				rout.SetValue(row, col, nodata)
	//			}
	//		}
	//		progress = int(100.0 * row / rowsLessOne)
	//		if progress != oldProgress {
	//			printf("\rLoop (3 of 3): %v%%", progress)
	//			oldProgress = progress
	//		}
	//	}

	if this.lnTransform {
		println("")
		printf("\r                                                    ")
		printf("\rTransforming output: %v%%", 0)
		oldProgress = 0
		for row = 0; row < rows; row++ {
			for col = 0; col < columns; col++ {
				z = rout.Value(row, col)
				if z != nodata {
					rout.SetValue(row, col, math.Log(z))
				}
			}
			progress = int(100.0 * row / rowsLessOne)
			if progress != oldProgress {
				printf("\rTransforming output: %v%%", progress)
				oldProgress = progress
			}
		}
	}

	println("\nSaving data...")
	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start1)
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by D8FlowAccumulation tool"))
	rout.Save()

	println("Operation complete!")

	//value = fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	//println(value)

	overallTime := time.Since(start1)
	value := fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
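The D8 pointer grid above stores, for each cell, the neighbour reached by the steepest downward slope, with each drop divided by the true inter-cell distance so that diagonal neighbours are not unfairly favoured over cardinal ones. A minimal sketch of that direction pick for a single cell (d8Direction and its argument layout are illustrative only):

package main

import (
	"fmt"
	"math"
)

// d8Direction returns 1..8 for the steepest downslope neighbour, using the
// same dX/dY ordering as above (north-east first, then clockwise), or 0 if
// no neighbour is lower.
func d8Direction(z float64, neighbours [8]float64, nodata, cellSizeX, cellSizeY float64) int8 {
	diag := math.Sqrt(cellSizeX*cellSizeX + cellSizeY*cellSizeY)
	dist := [8]float64{diag, cellSizeX, diag, cellSizeY, diag, cellSizeX, diag, cellSizeY}
	maxSlope := math.Inf(-1)
	var dir int8
	for n := 0; n < 8; n++ {
		if neighbours[n] == nodata {
			continue
		}
		if slope := (z - neighbours[n]) / dist[n]; slope > maxSlope {
			maxSlope = slope
			dir = int8(n) + 1
		}
	}
	if maxSlope <= 0 {
		return 0
	}
	return dir
}

func main() {
	// Centre cell at 100 m on a 10 m grid; the east neighbour (index 1) gives
	// the steepest descent, so the pointer value is 2.
	neighbours := [8]float64{99.5, 98.0, 99.0, 99.2, 100.4, 101.0, 100.1, 99.9}
	fmt.Println(d8Direction(100, neighbours, -32768, 10, 10)) // 2
}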
Example #14
func (this *FD8FlowAccum) Run() {
	start1 := time.Now()

	//var z, zN float64
	var progress, oldProgress int
	var col, row int
	//power := 2.0

	println("Reading DEM data...")
	dem, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}
	rows := dem.Rows
	columns := dem.Columns
	nodata := dem.NoDataValue
	println("Calculating pointer grid...")

	numCPUs := runtime.NumCPU()

	if numCPUs > 1 && this.parallel {
		numInflowing := structures.NewParallelRectangularArrayByte(rows, columns)
		//numInflowing := structures.NewRectangularArrayByte(rows, columns)

		outputData := structures.NewParallelRectangularArrayFloat64(rows, columns, nodata)
		//outputData := structures.NewRectangularArrayFloat64(rows, columns, nodata)
		//outputData.InitializeWithConstant(1.0)

		// parallel stuff
		println("Num CPUs:", numCPUs)
		c1 := make(chan bool)
		//c2 := make(chan bool)
		runtime.GOMAXPROCS(numCPUs)
		var wg sync.WaitGroup

		qg := NewQueueGroup(numCPUs)

		//		go func(rows, columns) {
		//			numCells := rows * columns
		//			progress, oldProgress := 0, -1
		//			numCellsCompleted := 0
		//			for numCellsCompleted < numCells {
		//				<-c2
		//				numCellsCompleted += increment
		//				if report {
		//					progress = int(100.0 * float64(numCellsCompleted) / float64(numCells))
		//					if progress != oldProgress {
		//						printf("\rLoop (2 of 2): %v%%", progress)
		//						oldProgress = progress
		//					}
		//				}
		//			}
		//		}(rows, columns)

		// calculate flow directions
		printf("\r                                                    ")
		printf("\rLoop (1 of 2): %v%%", 0)
		//var numSolvedCells int = 0
		startingRow := 0
		var rowBlockSize int = rows / numCPUs

		k := 0
		for startingRow < rows {
			endingRow := startingRow + rowBlockSize
			if endingRow >= rows {
				endingRow = rows - 1
			}
			wg.Add(1)
			go func(rowSt, rowEnd, k int) {
				defer wg.Done()
				var z, zN float64
				var j byte
				dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
				dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
				for row := rowSt; row <= rowEnd; row++ {
					byteData := make([]byte, columns)
					floatData := make([]float64, columns)
					for col := 0; col < columns; col++ {
						z = dem.Value(row, col)
						if z != nodata {
							j = 0
							for n := 0; n < 8; n++ {
								zN = dem.Value(row+dY[n], col+dX[n])
								if zN > z && zN != nodata {
									j++
								}
							}
							byteData[col] = j
							//numInflowing.SetValue(row, col, j)
							if j == 0 {
								qg.push(row, col, k)
							}
							floatData[col] = 1.0
						} else {
							//c2 <- true // update the number of solved cells
							//outputData.SetValue(row, col, nodata)
							floatData[col] = nodata
						}
					}
					numInflowing.SetRowData(row, byteData)
					outputData.SetRowData(row, floatData)
					c1 <- true // row completed
				}

			}(startingRow, endingRow, k)
			startingRow = endingRow + 1
			k++
		}

		oldProgress = -1
		rowsLessOne := rows - 1
		for rowsCompleted := 0; rowsCompleted < rows; rowsCompleted++ {
			<-c1 // a row has successfully completed
			progress = int(100.0 * float64(rowsCompleted) / float64(rowsLessOne))
			if progress != oldProgress {
				printf("\rLoop (1 of 2): %v%%", progress)
				oldProgress = progress
			}
		}

		wg.Wait()

		// create the output file
		config := raster.NewDefaultRasterConfig() //dem.GetRasterConfig()
		config.DataType = raster.DT_FLOAT32
		config.NoDataValue = nodata
		config.InitialValue = 1
		config.PreferredPalette = "blueyellow.pal"
		config.CoordinateRefSystemWKT = dem.GetRasterConfig().CoordinateRefSystemWKT
		config.EPSGCode = dem.GetRasterConfig().EPSGCode
		rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
			dem.North, dem.South, dem.East, dem.West, config)
		if err != nil {
			panic("Failed to write raster")
		}

		// perform the flow accumulation
		//var numSolvedCells int32 = 0
		println("")
		println("Performing the flow accumulation...")
		for k := 0; k < numCPUs; k++ {
			wg.Add(1)
			go func(k int) {
				defer wg.Done()
				dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
				dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
				//var numCellsTotal float64 = float64(rows * columns)
				var faValue float64
				var totalWeights float64
				//var progress, oldProgress int = 0, -1
				var z, zN float64
				var col, row, r, c, n int
				power := 2.0
				for qg.length(k) > 0 {
					row, col = qg.pop(k)
					z = dem.Value(row, col)
					faValue = outputData.Value(row, col)
					// calculate the weights
					totalWeights = 0
					weights := [8]float64{0, 0, 0, 0, 0, 0, 0, 0}
					downslope := [8]bool{false, false, false, false, false, false, false, false}
					for n = 0; n < 8; n++ {
						zN = dem.Value(row+dY[n], col+dX[n])
						if zN < z && zN != nodata {
							weights[n] = math.Pow(z-zN, power)
							totalWeights += weights[n]
							downslope[n] = true
						}
					}

					// now perform the neighbour accumulation
					for n = 0; n < 8; n++ {
						r = row + dY[n]
						c = col + dX[n]
						//zN = dem.Value(r, c)
						if downslope[n] {
							outputData.Increment(r, c, faValue*(weights[n]/totalWeights))
							p := numInflowing.DecrementAndReturn(r, c, 1.0)

							//see if you can progress further downslope
							if p == 0 {
								qg.push(r, c, k)
							}
						}
					}
					//c2 <- true
					//					atomic.AddInt32(&numSolvedCells, 1)
					//					progress = int(100.0 * float64(numSolvedCells) / numCellsTotal)
					//					if progress != oldProgress {
					//						printf("\rLoop (2 of 2): %v%%", progress)
					//						oldProgress = progress
					//					}
				}
			}(k)
		}

		//		oldProgress = -1
		//		for rowsCompleted := 0; rowsCompleted < rows; rowsCompleted++ {
		//			<-c1 // a row has successfully completed
		//			progress = int(100.0 * float64(rowsCompleted) / float64(rowsLessOne))
		//			if progress != oldProgress {
		//				printf("\rLoop (1 of 2): %v%%", progress)
		//				oldProgress = progress
		//			}
		//		}

		wg.Wait()

		if this.lnTransform {
			println("")
			printf("\r                                                    ")
			printf("\rTransforming output: %v%%", 0)
			oldProgress = 0
			//var z float64
			var rowsLessOne int32 = int32(rows - 1)
			for row = 0; row < rows; row++ {
				floatData := outputData.GetRowData(row)
				for col = 0; col < columns; col++ {
					//z = rout.Value(row, col)
					//z = outputData.Value(row, col)
					if floatData[col] != nodata {
						//rout.SetValue(row, col, math.Log(z))
						rout.SetValue(row, col, math.Log(floatData[col]))
					}
				}

				progress = int(100.0 * int32(row) / rowsLessOne)
				if progress != oldProgress {
					printf("\rTransforming output: %v%%", progress)
					oldProgress = progress
				}
			}
		} else {
			println("")
			printf("\r                                                    ")
			printf("\rOutputing data: %v%%", 0)
			oldProgress = 0
			//var z float64
			var rowsLessOne int32 = int32(rows - 1)
			for row = 0; row < rows; row++ {
				floatData := outputData.GetRowData(row)
				for col = 0; col < columns; col++ {
					//z = rout.Value(row, col)
					//z = outputData.Value(row, col)
					//					if floatData[col] != nodata {
					//						rout.SetValue(row, col, z)
					//					} else {
					//						rout.SetValue(row, col, nodata)
					//					}
					rout.SetValue(row, col, floatData[col])
				}
				progress = int(100.0 * int32(row) / rowsLessOne)
				if progress != oldProgress {
					printf("\rOutputing data: %v%%", progress)
					oldProgress = progress
				}
			}
		}

		println("\nSaving data...")
		rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
		elapsed := time.Since(start1)
		rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
		rout.AddMetadataEntry(fmt.Sprintf("Created by D8FlowAccumulation tool"))
		rout.Save()
	} else {
		numInflowing := structures.NewRectangularArrayByte(rows, columns)

		outputData := structures.NewRectangularArrayFloat64(rows, columns, nodata)
		outputData.InitializeWithConstant(1.0)

		q := newQueue()

		// calculate flow directions
		printf("\r                                                    ")
		printf("\rLoop (1 of 2): %v%%", 0)
		var numSolvedCells int32 = 0
		var rowsCompleted int32 = 0
		oldProgress = 0

		var z, zN float64
		var j byte
		var rowsLessOne int32 = int32(rows - 1)
		var progress, oldProgress int32 = 0, -1
		dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
		dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
		for row := 0; row <= rows; row++ {
			for col := 0; col < columns; col++ {
				z = dem.Value(row, col)
				if z != nodata {
					j = 0
					for n := 0; n < 8; n++ {
						zN = dem.Value(row+dY[n], col+dX[n])
						if zN > z && zN != nodata {
							j++
						}
					}
					numInflowing.SetValue(row, col, j)
					if j == 0 {
						q.push(row, col)
					}
				} else {
					numSolvedCells++
					outputData.SetValue(row, col, nodata)
				}
			}
			//numInflowing.SetRowData(row, byteData)
			rowsCompleted++
			progress = int32(100.0 * rowsCompleted / rowsLessOne)
			if progress != oldProgress {
				printf("\rLoop (1 of 2): %v%%", progress)
				oldProgress = progress
			}
		}

		// create the output file
		config := raster.NewDefaultRasterConfig() //dem.GetRasterConfig()
		config.DataType = raster.DT_FLOAT32
		config.NoDataValue = nodata
		config.InitialValue = 1
		config.PreferredPalette = "blueyellow.pal"
		config.CoordinateRefSystemWKT = dem.GetRasterConfig().CoordinateRefSystemWKT
		config.EPSGCode = dem.GetRasterConfig().EPSGCode
		rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
			dem.North, dem.South, dem.East, dem.West, config)
		if err != nil {
			panic("Failed to write raster")
		}

		// perform the flow accumulation
		println("")
		println("Performing the flow accumulation...")

		var numCellsTotal float64 = float64(rows * columns)
		var faValue float64
		//var faValueN float64
		var totalWeights float64
		progress, oldProgress = 0, -1
		var col, row, r, c, n int
		power := 2.0
		for q.count > 0 {
			row, col = q.pop()
			z = dem.Value(row, col)
			//faValue = rout.Value(row, col)
			faValue = outputData.Value(row, col)
			// calculate the weights
			totalWeights = 0
			weights := [8]float64{0, 0, 0, 0, 0, 0, 0, 0}
			downslope := [8]bool{false, false, false, false, false, false, false, false}
			for n = 0; n < 8; n++ {
				zN = dem.Value(row+dY[n], col+dX[n])
				if zN < z && zN != nodata {
					weights[n] = math.Pow(z-zN, power)
					totalWeights += weights[n]
					downslope[n] = true
				}
			}

			// now perform the neighbour accumulation
			for n = 0; n < 8; n++ {
				r = row + dY[n]
				c = col + dX[n]
				//zN = dem.Value(r, c)
				if downslope[n] {
					//faValueN = rout.Value(r, c)
					//faValueN = outputData.Value(r, c)
					// update the output grids
					//rout.SetValue(r, c, faValueN+faValue*(weights[n]/totalWeights))
					outputData.Increment(r, c, faValue*(weights[n]/totalWeights))
					numInflowing.Decrement(r, c)

					//see if you can progress further downslope
					//if numInflowing[r+1][c+1] == 0 {
					if numInflowing.Value(r, c) == 0 {
						//qs[k].push(r, c)
						q.push(r, c)
					}
				}
			}

			numSolvedCells++
			progress = int32(100.0 * float64(numSolvedCells) / numCellsTotal)
			if progress != oldProgress {
				printf("\rLoop (2 of 2): %v%%", progress)
				oldProgress = progress
			}
		}

		if this.lnTransform {
			println("")
			printf("\r                                                    ")
			printf("\rTransforming output: %v%%", 0)
			oldProgress = 0
			var z float64
			var rowsLessOne int32 = int32(rows - 1)
			for row = 0; row < rows; row++ {
				for col = 0; col < columns; col++ {
					//z = rout.Value(row, col)
					z = outputData.Value(row, col)
					if z != nodata {
						rout.SetValue(row, col, math.Log(z))
					} else {
						rout.SetValue(row, col, nodata)
					}
				}
				progress = int32(100.0 * int32(row) / rowsLessOne)
				if progress != oldProgress {
					printf("\rTransforming output: %v%%", progress)
					oldProgress = progress
				}
			}
		} else {
			println("")
			printf("\r                                                    ")
			printf("\rOutputing data: %v%%", 0)
			oldProgress = 0
			var z float64
			var rowsLessOne int32 = int32(rows - 1)
			for row = 0; row < rows; row++ {
				for col = 0; col < columns; col++ {
					//z = rout.Value(row, col)
					z = outputData.Value(row, col)
					if z != nodata {
						rout.SetValue(row, col, z)
					} else {
						rout.SetValue(row, col, nodata)
					}
				}
				progress = int32(100.0 * int32(row) / rowsLessOne)
				if progress != oldProgress {
					printf("\rOutputing data: %v%%", progress)
					oldProgress = progress
				}
			}
		}

		println("\nSaving data...")
		rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
		elapsed := time.Since(start1)
		rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
		rout.AddMetadataEntry(fmt.Sprintf("Created by D8FlowAccumulation tool"))
		rout.Save()
	}

	println("Operation complete!")

	overallTime := time.Since(start1)
	value := fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
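Where D8 sends all of a cell's accumulated flow to a single neighbour, the FD8 scheme above splits it among every lower neighbour in proportion to (z - zN)^power, with power fixed at 2. A small sketch of just that weight computation (fd8Weights is an invented helper, not part of the library):

package main

import (
	"fmt"
	"math"
)

// fd8Weights returns the fraction of flow passed to each of the 8 neighbours,
// computed as (z-zN)^power over the sum of that quantity across all downslope
// neighbours; entries for non-downslope neighbours stay zero.
func fd8Weights(z float64, neighbours [8]float64, nodata, power float64) [8]float64 {
	var weights [8]float64
	total := 0.0
	for n := 0; n < 8; n++ {
		if zN := neighbours[n]; zN < z && zN != nodata {
			weights[n] = math.Pow(z-zN, power)
			total += weights[n]
		}
	}
	if total > 0 {
		for n := 0; n < 8; n++ {
			weights[n] /= total
		}
	}
	return weights
}

func main() {
	// Two downslope neighbours: a 2 m drop (index 1) and a 1 m drop (index 2).
	neighbours := [8]float64{101, 98, 99, 102, 103, 100.5, 104, 105}
	w := fd8Weights(100, neighbours, -32768, 2.0)
	fmt.Printf("%.2f\n", w) // the 2 m drop gets 4/5 of the flow, the 1 m drop 1/5
}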
Example #15
func (this *Quantiles) Run() {
	start1 := time.Now()

	var progress, oldProgress, col, row, i, bin int
	var z float64

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}

	start2 := time.Now()

	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()
	minValue := rin.GetMinimumValue()
	maxValue := rin.GetMaximumValue()
	valueRange := math.Ceil(maxValue - minValue)

	println("Calculating quantiles...")

	highResNumBins := 10000
	highResBinSize := valueRange / float64(highResNumBins)

	primaryHisto := make([]int, highResNumBins)
	numValidCells := 0
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z != nodata {
				bin = int(math.Floor((z - minValue) / highResBinSize))
				if bin >= highResNumBins {
					bin = highResNumBins - 1
				}
				primaryHisto[bin]++
				numValidCells++
			}
		}
	}

	for i = 1; i < highResNumBins; i++ {
		primaryHisto[i] += primaryHisto[i-1]
	}

	cdf := make([]float64, highResNumBins)
	for i = 0; i < highResNumBins; i++ {
		cdf[i] = 100.0 * float64(primaryHisto[i]) / float64(numValidCells)
	}

	quantileProportion := 100.0 / float64(this.numBins)

	for i = 0; i < highResNumBins; i++ {
		primaryHisto[i] = int(math.Floor(cdf[i] / quantileProportion))
		if primaryHisto[i] == this.numBins {
			primaryHisto[i] = this.numBins - 1
		}
	}

	// create the output raster
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = inConfig.PreferredPalette
	config.DataType = raster.DT_INT16
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	printf("\r                                                           ")

	oldProgress = -1
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z != nodata {
				i = int(math.Floor((z - minValue) / highResBinSize))
				if i >= highResNumBins {
					i = highResNumBins - 1
				}
				bin = primaryHisto[i]

				rout.SetValue(row, col, float64(bin+1))
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress != oldProgress {
			printf("\rProgress: %v%%", progress)
			oldProgress = progress
		}
	}

	printf("\r                                                           ")
	printf("\rSaving data...\n")

	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by Quantiles with %v bins", this.numBins))
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
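Quantiles builds a 10000-bin histogram of the input, converts it to a cumulative distribution in percent, and maps each fine bin to floor(cdf / (100 / numBins)), clamped to numBins - 1; the output then stores that bin plus one, so the written quantiles run from 1 to numBins. A compact sketch of the lookup-table construction on a toy histogram (quantileLookup is an invented name):

package main

import (
	"fmt"
	"math"
)

// quantileLookup turns a high-resolution histogram into a bin-to-quantile
// lookup table, following the same CDF construction as the Quantiles tool.
func quantileLookup(histo []int, numBins int) []int {
	numValid := 0
	for _, c := range histo {
		numValid += c
	}
	lookup := make([]int, len(histo))
	cum := 0
	prop := 100.0 / float64(numBins)
	for i, c := range histo {
		cum += c
		cdf := 100.0 * float64(cum) / float64(numValid)
		q := int(math.Floor(cdf / prop))
		if q == numBins {
			q = numBins - 1
		}
		lookup[i] = q
	}
	return lookup
}

func main() {
	// Ten coarse bins, 40 cells, mapped into quartiles (numBins = 4).
	histo := []int{4, 4, 4, 4, 4, 4, 4, 4, 4, 4}
	fmt.Println(quantileLookup(histo, 4)) // [0 0 1 1 2 2 2 3 3 3]
}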
Example #16
func (this *Aspect) Run() {
	start1 := time.Now()

	var progress, oldProgress int

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}

	start2 := time.Now()

	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()
	gridRes := (rin.GetCellSizeX() + rin.GetCellSizeY()) / 2.0
	eightGridRes := 8 * gridRes
	const radToDeg float64 = 180.0 / math.Pi

	// create the output raster
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = "circular_bw.pal"
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	zConvFactor := 1.0
	if rin.IsInGeographicCoordinates() {
		// calculate a new z-conversion factor
		midLat := (rin.North - rin.South) / 2.0
		if midLat <= 90 && midLat >= -90 {
			zConvFactor = 1.0 / (113200 * math.Cos(math.Pi/180.0*midLat))
		}
	}

	numCPUs := runtime.NumCPU()
	c1 := make(chan bool)
	runtime.GOMAXPROCS(numCPUs)
	var wg sync.WaitGroup

	// calculate aspect
	printf("\r                                                    ")
	printf("\rProgress: %v%%", 0)
	//var numSolvedCells int = 0
	startingRow := 0
	rowBlockSize := rows / numCPUs

	k := 0
	for startingRow < rows {
		endingRow := startingRow + rowBlockSize
		if endingRow >= rows {
			endingRow = rows - 1
		}
		wg.Add(1)
		go func(rowSt, rowEnd, k int) {
			defer wg.Done()
			var z, zN, fy, fx, value float64
			dX := [8]int{1, 1, 1, 0, -1, -1, -1, 0}
			dY := [8]int{-1, 0, 1, 1, 1, 0, -1, -1}
			N := [8]float64{}
			for row := rowSt; row <= rowEnd; row++ {
				floatData := make([]float64, columns)
				for col := 0; col < columns; col++ {
					z = rin.Value(row, col)
					if z != nodata {
						z = z * zConvFactor
						for n := 0; n < 8; n++ {
							zN = rin.Value(row+dY[n], col+dX[n])
							if zN != nodata {
								N[n] = zN * zConvFactor
							} else {
								N[n] = z
							}
						}

						fy = (N[6] - N[4] + 2*(N[7]-N[3]) + N[0] - N[2]) / eightGridRes
						fx = (N[2] - N[4] + 2*(N[1]-N[5]) + N[0] - N[6]) / eightGridRes

						if fx != 0 {
							value = 180 - math.Atan(fy/fx)*radToDeg + 90*(fx/math.Abs(fx))
							floatData[col] = value
						} else {
							floatData[col] = -1.0
						}
					} else {
						floatData[col] = nodata
					}
				}
				rout.SetRowValues(row, floatData)
				c1 <- true // row completed
			}
		}(startingRow, endingRow, k)
		startingRow = endingRow + 1
		k++
	}

	oldProgress = 0
	for rowsCompleted := 0; rowsCompleted < rows; rowsCompleted++ {
		<-c1 // a row has successfully completed
		progress = int(100.0 * float64(rowsCompleted) / float64(rowsLessOne))
		if progress != oldProgress {
			printf("\rProgress: %v%%", progress)
			oldProgress = progress
		}
	}

	wg.Wait()

	printf("\r                                                           ")
	printf("\rSaving data...\n")

	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by Slope"))
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
Example #17
func (this *ElevationPercentile) Run() {
	start1 := time.Now()

	var progress, oldProgress, col, row int
	var i, j, bin, highResNumBins uint32
	var z, percentile float64
	var N, numLess, binRunningTotal uint32
	var x1, x2, y1, y2 int
	var a, b, c, d, e, f, g, rowSum []uint32

	println("Reading raster data...")
	rin, err := raster.CreateRasterFromFile(this.inputFile)
	if err != nil {
		println(err.Error())
	}

	start2 := time.Now()

	rows := rin.Rows
	columns := rin.Columns
	rowsLessOne := rows - 1
	nodata := rin.NoDataValue
	inConfig := rin.GetRasterConfig()
	minValue := rin.GetMinimumValue()
	maxValue := rin.GetMaximumValue()
	valueRange := math.Ceil(maxValue - minValue)

	highResNumBins = 10000
	highResBinSize := valueRange / float64(highResNumBins)

	primaryHisto := make([]uint32, highResNumBins)
	var numValidCells uint32 = 0
	for row = 0; row < rows; row++ {
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z != nodata {
				i = uint32(math.Floor((z - minValue) / highResBinSize))
				//				if i == this.numBins {
				//					i = this.numBins - 1
				//				}
				if i >= highResNumBins {
					i = highResNumBins - 1
				}
				primaryHisto[i]++
				numValidCells++
			}
		}
	}
	quantileProportion := numValidCells / this.numBins
	binNumMap := make([]uint32, highResNumBins)
	binTotal := make([]uint32, this.numBins)
	valProbMap := make([]float64, highResNumBins)
	binRunningTotal = 0
	bin = 0
	for i = 0; i < highResNumBins; i++ {
		binRunningTotal += primaryHisto[i]
		if binRunningTotal > quantileProportion {
			if bin < this.numBins-1 {
				bin++
				binRunningTotal = primaryHisto[i]
			}
		}
		binNumMap[i] = bin
		binTotal[bin] += primaryHisto[i]
		valProbMap[i] = float64(binRunningTotal)
		//primaryHisto[i] += primaryHisto[i-1]
	}

	for i = 0; i < highResNumBins; i++ {
		valProbMap[i] = valProbMap[i] / float64(binTotal[binNumMap[i]])
	}

	//	for i = 0; i < highResNumBins; i++ {
	//		primaryHisto[i] = uint32(math.Floor(cdf[i] / quantileProportion))
	//		if primaryHisto[i] == this.numBins {
	//			primaryHisto[i] = this.numBins - 1
	//		}
	//	}

	//	binLowerValue := make([]float64, this.numBins)
	//	binSize := make([]float64, this.numBins)
	//	for i = 0; i < this.numBins; i++ {
	//		binLowerValue[i] = minValue + float64(i)*binSize
	//	}
	//	bin = -1
	//	for i = 0; i < highResNumBins; i++ {
	//		if primaryHisto[i] > bin {
	//			bin = primaryHisto[i]
	//			// what elevation does this bin correpsond to?
	//			binLowerValue[bin] = minValue + float64(i)*highResBinSize
	//			if bin > 0 {
	//				binSize[bin-1] = (minValue + float64(i-1)*highResBinSize) - binLowerValue[bin-1]
	//			}
	//		}
	//	}
	//	binSize[this.numBins-1] = maxValue - binLowerValue[this.numBins-1]

	//	for i = 0; i < this.numBins; i++ {
	//		println(binLowerValue[i], binSize[i])
	//	}

	histoImage := make([][][]uint32, rows)

	oldProgress = -1
	for row = 0; row < rows; row++ {
		histoImage[row] = make([][]uint32, columns)
		rowSum = make([]uint32, this.numBins)
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z != nodata {
				//bin = int(math.Floor((z - minValue) / binSize))
				i = uint32(math.Floor((z - minValue) / highResBinSize))
				if i >= highResNumBins {
					i = highResNumBins - 1
				}
				bin = binNumMap[i]
				rowSum[bin]++
			}
			histoImage[row][col] = make([]uint32, this.numBins)
			if row > 0 {
				for i = 0; i < this.numBins; i++ {
					histoImage[row][col][i] = rowSum[i] + histoImage[row-1][col][i]
				}
			} else {
				for i = 0; i < this.numBins; i++ {
					histoImage[row][col][i] = rowSum[i]
				}
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress%5 == 0 && progress != oldProgress {
			printf("Calculating integral histogram (1 of 2): %v%%\n", progress)
			oldProgress = progress
		}
	}

	// create the output raster
	config := raster.NewDefaultRasterConfig()
	config.PreferredPalette = "blue_white_red.plt"
	config.DataType = raster.DT_FLOAT32
	config.NoDataValue = nodata
	config.InitialValue = nodata
	config.CoordinateRefSystemWKT = inConfig.CoordinateRefSystemWKT
	config.EPSGCode = inConfig.EPSGCode
	rout, err := raster.CreateNewRaster(this.outputFile, rows, columns,
		rin.North, rin.South, rin.East, rin.West, config)
	if err != nil {
		println("Failed to write raster")
		return
	}

	e = make([]uint32, this.numBins)
	f = make([]uint32, this.numBins)
	g = make([]uint32, this.numBins)

	oldProgress = -1
	for row = 0; row < rows; row++ {
		y1 = row - this.neighbourhoodSize
		if y1 < 0 {
			y1 = 0
		}
		if y1 >= rows {
			y1 = rows - 1
		}

		y2 = row + this.neighbourhoodSize
		if y2 < 0 {
			y2 = 0
		}
		if y2 >= rows {
			y2 = rows - 1
		}
		for col = 0; col < columns; col++ {
			z = rin.Value(row, col)
			if z != nodata {
				//bin = int(math.Floor((z - minValue) / binSize))
				j = uint32(math.Floor((z - minValue) / highResBinSize))
				if j >= highResNumBins {
					j = highResNumBins - 1
				}
				bin = binNumMap[j]

				x1 = col - this.neighbourhoodSize
				if x1 < 0 {
					x1 = 0
				}
				if x1 >= columns {
					x1 = columns - 1
				}

				x2 = col + this.neighbourhoodSize
				if x2 < 0 {
					x2 = 0
				}
				if x2 >= columns {
					x2 = columns - 1
				}

				a = histoImage[y2][x2]
				b = histoImage[y1][x1]
				c = histoImage[y1][x2]
				d = histoImage[y2][x1]

				for i = 0; i < this.numBins; i++ {
					e[i] = a[i] + b[i]
				}
				for i = 0; i < this.numBins; i++ {
					f[i] = e[i] - c[i]
				}
				for i = 0; i < this.numBins; i++ {
					g[i] = f[i] - d[i]
				}

				N = 0
				numLess = 0
				for i = 0; i < this.numBins; i++ {
					N += g[i]
					if i < bin {
						numLess += g[i]
					}
				}

				if N > 0 {
					//percentile = 100.0 * float64(g[bin]) / float64(N) // only used for accuracy assessment
					percentile = 100.0 * (float64(numLess) + valProbMap[j]*float64(g[bin])) / float64(N)
					//percentile = 100.0 * (float64(numLess) + (z-binLowerValue[bin])/binSize[bin]*float64(g[bin])) / float64(N)
					rout.SetValue(row, col, percentile)
				}
			}
		}
		progress = int(100.0 * row / rowsLessOne)
		if progress%5 == 0 && progress != oldProgress {
			printf("Performing analysis (2 of 2): %v%%\n", progress)
			oldProgress = progress
		}
	}

	println("Saving data...")

	elapsed := time.Since(start2)
	rout.AddMetadataEntry(fmt.Sprintf("Created on %s", time.Now().Local()))
	rout.AddMetadataEntry(fmt.Sprintf("Elapsed Time: %v", elapsed))
	rout.AddMetadataEntry(fmt.Sprintf("Created by ElevationPercentile tool"))
	rout.AddMetadataEntry(fmt.Sprintf("Window size: %v", (this.neighbourhoodSize*2 + 1)))
	rout.AddMetadataEntry(fmt.Sprintf("Num. histogram bins: %v", this.numBins))
	config.DisplayMinimum = 0
	config.DisplayMaximum = 100
	rout.SetRasterConfig(config)
	rout.Save()

	println("Operation complete!")

	value := fmt.Sprintf("Elapsed time (excluding file I/O): %s", elapsed)
	println(value)

	overallTime := time.Since(start1)
	value = fmt.Sprintf("Elapsed time (total): %s", overallTime)
	println(value)
}
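The value written by ElevationPercentile is 100 * (numLess + valProb * g[bin]) / N, where g is the windowed histogram recovered from the integral histogram, numLess counts the window cells falling in lower bins, and valProb interpolates the cell's position within its own bin. A tiny worked sketch of that final expression with invented numbers (localPercentile is not a library function):

package main

import "fmt"

// localPercentile reproduces the final expression of the tool above: the
// share of window cells in strictly lower bins, plus the within-bin fraction
// valProb of the cells that share the centre cell's bin.
func localPercentile(g []uint32, bin uint32, valProb float64) float64 {
	var n, numLess uint32
	for i := uint32(0); i < uint32(len(g)); i++ {
		n += g[i]
		if i < bin {
			numLess += g[i]
		}
	}
	if n == 0 {
		return 0
	}
	return 100.0 * (float64(numLess) + valProb*float64(g[bin])) / float64(n)
}

func main() {
	// A window whose 50 cells fall into 4 bins; the centre cell sits in bin 2,
	// 40% of the way through that bin's members.
	g := []uint32{10, 20, 10, 10}
	fmt.Println(localPercentile(g, 2, 0.4)) // (30 + 0.4*10) / 50 * 100 = 68
}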