Skip to content

Commit 5a82f51

Browse files
committed
Updates
1 parent a0746ec commit 5a82f51

File tree

6 files changed

+204
-47
lines changed

6 files changed

+204
-47
lines changed

.DS_Store

0 Bytes
Binary file not shown.

whitebox-common/src/plugins.rs

+1
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ impl<S: ToString> From<S> for InvocationError {
2323
}
2424

2525
#[derive(Copy, Clone)]
26+
#[warn(improper_ctypes_definitions)]
2627
pub struct PluginDeclaration {
2728
pub rustc_version: &'static str,
2829
pub core_version: &'static str,

whitebox-plugins/src/repair_stream_vector_topology/main.rs

+54-41
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
/*
22
Authors: Prof. John Lindsay
33
Created: 03/08/2021 (originally in Whitebox Toolset Extension)
4-
Last Modified: 19/05/2023
4+
Last Modified: 04/10/2023
55
License: MIT
66
*/
77

@@ -46,7 +46,7 @@ const EPSILON: f64 = std::f64::EPSILON;
4646
///
4747
/// ![](../../doc_img/RepairStreamVectorTopology.png)
4848
///
49-
/// The user may optinally specify the name of the input vector stream network (`--input`) and the output file
49+
/// The user may optionally specify the name of the input vector stream network (`--input`) and the output file
5050
/// (`--output`). Note that if an input file is not specified by the user, the tool will search for all vector
5151
/// files (*.shp) files contained within the current working directory. This feature can be very useful when
5252
/// you need to process a large number of stream files contained within a single directory. The tool will
@@ -57,6 +57,11 @@ const EPSILON: f64 = std::f64::EPSILON;
5757
/// data, however, if the input are in geographic coordinates (latitude and longitude), then specifying a
5858
/// small valued snap distance is advisable.
5959
///
60+
/// Additionally, the tool possesses two Boolean flags, `--reverse_backward_arcs` and `--correct_nonconfluence_joins`
61+
/// which determine whether the tool will correct backward arcs (i.e., line segments that are oriented
62+
/// in the reverse direction to the streamflow) and non-confluence joins (i.e., upstream/downstream line
63+
/// segments that are not joined at confluence locations).
64+
///
6065
/// Notice that the attributes of the input layer will not be
6166
/// carried over to the output file because there is not a one-for-one feature correspondence between the
6267
/// two files due to the joins and splits of stream segments. Instead the output attribute table will
@@ -105,10 +110,12 @@ fn help() {
105110
version Prints the tool version information.
106111
107112
The following flags can be used with the 'run' command:
108-
--routes Name of the input routes vector file.
109-
-o, --output Name of the output HTML file.
110-
--length Maximum segment length (m).
111-
--dist Search distance, in grid cells, used in visibility analysis.
113+
--routes Name of the input routes vector file.
114+
-o, --output Name of the output HTML file.
115+
--length Maximum segment length (m).
116+
--dist Search distance, in grid cells, used in visibility analysis.
117+
--reverse_backward_arcs Boolean flag determines whether backward arcs are corrected.
118+
--correct_nonconfluence_joins Boolean flag determines whether non-confluence joins are corrected.
112119
113120
Input/output file names can be fully qualified, or can rely on the
114121
working directory contained in the WhiteboxTools settings.json file.
@@ -153,6 +160,7 @@ fn run(args: &Vec<String>) -> Result<(), std::io::Error> {
153160
let mut output_file: String = String::new();
154161
let mut snap_dist = 1.0;
155162
let mut reverse_backward_arcs = false;
163+
let mut correct_nonconfluence_joins = false;
156164
if args.len() <= 1 {
157165
return Err(Error::new(
158166
ErrorKind::InvalidInput,
@@ -197,6 +205,10 @@ fn run(args: &Vec<String>) -> Result<(), std::io::Error> {
197205
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
198206
reverse_backward_arcs = true;
199207
}
208+
} else if flag_val == "-correct_nonconfluence_joins" {
209+
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
210+
correct_nonconfluence_joins = true;
211+
}
200212
}
201213
}
202214

@@ -368,53 +380,54 @@ fn run(args: &Vec<String>) -> Result<(), std::io::Error> {
368380
let mut num_polylines = polylines.len(); // will be updated after the joins.
369381

370382

371-
372-
373-
// Find all of the segments that can be joined because they link at non-confluences.
374383
let endnode_tree = RTree::bulk_load(end_nodes);
375384
let precision = EPSILON * 10f64;
376385
let mut p1: Point2D;
377386
let mut connections = vec![[num_polylines, num_polylines]; num_polylines];
378387
let mut connected_polyline: usize;
379388
let mut num_neighbours: usize;
380-
for fid in 0..num_polylines {
381-
// fid = polylines[poly_id].id1;
382-
p1 = polylines[fid].get_first_node();
383-
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);
389+
390+
if correct_nonconfluence_joins {
391+
// Find all of the segments that can be joined because they link at non-confluences.
392+
for fid in 0..num_polylines {
393+
// fid = polylines[poly_id].id1;
394+
p1 = polylines[fid].get_first_node();
395+
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);
384396

385-
connected_polyline = num_polylines;
386-
num_neighbours = 0;
387-
for p in ret {
388-
if p.data != fid {
389-
connected_polyline = p.data;
390-
num_neighbours += 1;
397+
connected_polyline = num_polylines;
398+
num_neighbours = 0;
399+
for p in ret {
400+
if p.data != fid {
401+
connected_polyline = p.data;
402+
num_neighbours += 1;
403+
}
404+
}
405+
if num_neighbours == 1 {
406+
connections[fid][0] = connected_polyline;
391407
}
392-
}
393-
if num_neighbours == 1 {
394-
connections[fid][0] = connected_polyline;
395-
}
396408

397-
p1 = polylines[fid].get_last_node();
398-
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);
409+
p1 = polylines[fid].get_last_node();
410+
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);
399411

400-
connected_polyline = num_polylines;
401-
num_neighbours = 0;
402-
for p in ret {
403-
if p.data != fid {
404-
connected_polyline = p.data;
405-
num_neighbours += 1;
412+
connected_polyline = num_polylines;
413+
num_neighbours = 0;
414+
for p in ret {
415+
if p.data != fid {
416+
connected_polyline = p.data;
417+
num_neighbours += 1;
418+
}
419+
}
420+
if num_neighbours == 1 {
421+
connections[fid][1] = connected_polyline;
406422
}
407-
}
408-
if num_neighbours == 1 {
409-
connections[fid][1] = connected_polyline;
410-
}
411423

412-
if configurations.verbose_mode && inputs.len() == 1 {
413-
progress = (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize;
414-
let mut old_progress = old_progress.lock().unwrap();
415-
if progress != *old_progress {
416-
println!("Looking for joins in arcs: {}%", progress);
417-
*old_progress = progress;
424+
if configurations.verbose_mode && inputs.len() == 1 {
425+
progress = (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize;
426+
let mut old_progress = old_progress.lock().unwrap();
427+
if progress != *old_progress {
428+
println!("Looking for joins in arcs: {}%", progress);
429+
*old_progress = progress;
430+
}
418431
}
419432
}
420433
}

whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json

+8
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,14 @@
3838
"parameter_type": "Boolean",
3939
"default_value": "true",
4040
"optional": true
41+
},
42+
{
43+
"name": "Correct non-confluence joins?",
44+
"flags": ["--correct_nonconfluence_joins"],
45+
"description": "Optional flag to request that non-confluence joins be corrected.",
46+
"parameter_type": "Boolean",
47+
"default_value": "true",
48+
"optional": true
4149
}
4250
]
4351
}

whitebox-raster/src/geotiff/mod.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -3009,10 +3009,10 @@ pub fn write_geotiff<'a>(r: &'a mut Raster) -> Result<(), Error> {
30093009

30103010
// tGTCitationGeoKey (1026)
30113011
let mut v = String::from(
3012-
geographic_type_map
3012+
*geographic_type_map
30133013
.get(&r.configs.epsg_code)
30143014
.unwrap()
3015-
.clone(),
3015+
// .clone(),
30163016
);
30173017
v.push_str("|");
30183018
v = v.replace("_", " ");
@@ -3087,10 +3087,10 @@ pub fn write_geotiff<'a>(r: &'a mut Raster) -> Result<(), Error> {
30873087

30883088
// PCSCitationGeoKey (3073)
30893089
let mut v = String::from(
3090-
projected_cs_type_map
3090+
*projected_cs_type_map
30913091
.get(&r.configs.epsg_code)
30923092
.unwrap()
3093-
.clone(),
3093+
// .clone(),
30943094
);
30953095
v.push_str("|");
30963096
v = v.replace("_", " ");

whitebox-tools-app/src/tools/hydro_analysis/flow_accum_full_workflow.rs

+137-2
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
This tool is part of the WhiteboxTools geospatial analysis library.
33
Authors: Dr. John Lindsay
44
Created: 28/06/2017
5-
Last Modified: 18/10/2019
5+
Last Modified: 26/10/2023
66
License: MIT
77
88
NOTES: This tool provides a full workflow D8 flow operation. This includes removing depressions, calculating
@@ -92,6 +92,15 @@ impl FlowAccumulationFullWorkflow {
9292
optional: true,
9393
});
9494

95+
parameters.push(ToolParameter {
96+
name: "Corrected flow pointer".to_owned(),
97+
flags: vec!["--correct_pntr".to_owned()],
98+
description: "Optional flag to apply corrections that limit potential artifacts in the flow pointer.".to_owned(),
99+
parameter_type: ParameterType::Boolean,
100+
default_value: None,
101+
optional: true,
102+
});
103+
95104
parameters.push(ToolParameter {
96105
name: "Log-transform the output?".to_owned(),
97106
flags: vec!["--log".to_owned()],
@@ -183,6 +192,7 @@ impl WhiteboxTool for FlowAccumulationFullWorkflow {
183192
let mut pntr_file = String::new();
184193
let mut accum_file = String::new();
185194
let mut out_type = String::from("sca");
195+
let mut correct_pntr = false;
186196
let mut log_transform = false;
187197
let mut clip_max = false;
188198
let mut esri_style = false;
@@ -246,6 +256,10 @@ impl WhiteboxTool for FlowAccumulationFullWorkflow {
246256
} else {
247257
out_type = String::from("ca");
248258
}
259+
} else if vec[0].to_lowercase() == "-correct_pntr" || vec[0].to_lowercase() == "--correct_pntr" {
260+
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
261+
correct_pntr = true;
262+
}
249263
} else if vec[0].to_lowercase() == "-log" || vec[0].to_lowercase() == "--log" {
250264
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
251265
log_transform = true;
@@ -610,7 +624,128 @@ impl WhiteboxTool for FlowAccumulationFullWorkflow {
610624
Err(e) => return Err(e),
611625
};
612626

613-
// calculate the number of inflowing cells
627+
if correct_pntr {
628+
let (mut dir, mut dir_n, mut dir_no, mut new_val, mut old_val): (i8, i8, i8, i8, i8);
629+
let (mut l1, mut l2, mut r1, mut r2): (i8, i8, i8, i8);
630+
let (mut zl, mut zr, mut zn): (f64, f64, f64);
631+
let mut count: isize;
632+
let mut resolved: bool;
633+
for row in 1..(rows-1) { // buffer the edges
634+
for col in 1..(columns-1) {
635+
// flow_dir is the n-index of the flow revicing cell
636+
dir = flow_dir.get_value(row, col);
637+
old_val = dir;
638+
if dir >= 0 {
639+
// error 1: zig-zag where two flow directions intersect at 90deg angles
640+
// because scan order is top down, crosses should only occur on the bottom half
641+
l1 = dir + 7 - (8 * (dir + 7 > 7) as i8);
642+
r1 = dir + 1 - (8 * (dir + 1 > 7) as i8);
643+
l2 = r1 + 5 - (8 * (r1 + 5 > 7) as i8);
644+
r2 = l1 + 3 - (8 * (l1 + 3 > 7) as i8);
645+
dir_n = flow_dir.get_value(row + dy[l1 as usize], col + dx[l1 as usize]); // left
646+
zl = output.get_value(row + dy[l1 as usize], col + dx[l1 as usize]);
647+
dir_no = flow_dir.get_value(row + dy[r1 as usize], col + dx[r1 as usize]); // right
648+
zr = output.get_value(row + dy[r1 as usize], col + dx[r1 as usize]);
649+
zn = output.get_value(row + dy[dir as usize], col + dx[dir as usize]);
650+
if dir_n == r2 && zr != nodata && zr <= zn { // left -> right cross && not nodata && is lower
651+
new_val = r1;
652+
} else if dir_no == l2 && zl != nodata && zl <= zn { // right -> left cross && not nodata && is lower
653+
new_val = l1;
654+
} else { // keep original value
655+
new_val = dir;
656+
}
657+
658+
if new_val != dir && new_val % 2 == 0 && [0,6,7].contains(&new_val) { // make sure new val doesn't create error 1 where it can't be corrected
659+
l1 = new_val + 7 - (8 * (new_val + 7 > 7) as i8);
660+
r1 = new_val + 1 - (8 * (new_val + 1 > 7) as i8);
661+
l2 = r1 + 5 - (8 * (r1 + 5 > 7) as i8);
662+
r2 = l1 + 3 - (8 * (l1 + 3 > 7) as i8);
663+
dir_n = flow_dir.get_value(row + dy[l1 as usize], col + dx[l1 as usize]); // left
664+
dir_no = flow_dir.get_value(row + dy[r1 as usize], col + dx[r1 as usize]); // right
665+
if dir_n == r2 || dir_no == l2 { // avoid new error, roll back
666+
new_val = old_val;
667+
}
668+
}
669+
670+
// setting value here preserves solutions from error 1 but prevents this scan from being parallelized
671+
flow_dir.set_value(row, col, new_val);
672+
673+
// error 2: overshoot where flow points to a cell that points back into the original neighbourhood
674+
resolved = false;
675+
count = 0;
676+
dir = new_val;
677+
while !resolved && dir >= 0 { // search until outflow is found, else keep original value.
678+
// find the flow receiver n-index.
679+
dir_n = flow_dir.get_value(row + dy[dir as usize], col + dx[dir as usize]);
680+
if dir_n >= 0 {
681+
old_val = dir; // always set to the last valid direction
682+
683+
// flow within these bounds flow back into the original neighbourhood.
684+
l1 = dir + 5 - (8 * (dir + 5 > 7) as i8);
685+
r1 = dir + 3 - (8 * (dir + 3 > 7) as i8);
686+
l2 = dir + 6 - (8 * (dir + 6 > 7) as i8);
687+
r2 = dir + 2 - (8 * (dir + 2 > 7) as i8);
688+
689+
if dir % 2 == 0 { // diagonal
690+
if dir_n == l1 {
691+
new_val = r1 + 4 - (8 * (r1 + 4 > 7) as i8);
692+
} else if dir_n == r1 {
693+
new_val = l1 + 4 - (8 * (l1 + 4 > 7) as i8);
694+
} else {
695+
new_val = dir;
696+
resolved = true;
697+
}
698+
} else { // cardinal
699+
if dir_n == l1 {
700+
new_val = r2 + 4 - (8 * (r2 + 4 > 7) as i8);
701+
} else if dir_n == r1 {
702+
new_val = l2 + 4 - (8 * (l2 + 4 > 7) as i8);
703+
} else if dir_n == l2 {
704+
new_val = r1 + 4 - (8 * (r1 + 4 > 7) as i8);
705+
} else if dir_n == r2 {
706+
new_val = l1 + 4 - (8 * (l1 + 4 > 7) as i8);
707+
} else {
708+
new_val = dir;
709+
resolved = true;
710+
};
711+
}
712+
713+
if new_val != dir && new_val % 2 == 0 && [0,6,7].contains(&new_val) { // make sure new val doesn't create error 1 where it can't be corrected
714+
l1 = new_val + 7 - (8 * (new_val + 7 > 7) as i8);
715+
r1 = new_val + 1 - (8 * (new_val + 1 > 7) as i8);
716+
l2 = r1 + 5 - (8 * (r1 + 5 > 7) as i8);
717+
r2 = l1 + 3 - (8 * (l1 + 3 > 7) as i8);
718+
dir_n = flow_dir.get_value(row + dy[l1 as usize], col + dx[l1 as usize]); // left
719+
dir_no = flow_dir.get_value(row + dy[r1 as usize], col + dx[r1 as usize]); // right
720+
if dir_n == r2 || dir_no == l2 { // roll back
721+
new_val = old_val;
722+
resolved = true;
723+
}
724+
}
725+
} else { // roll back
726+
new_val = old_val;
727+
resolved = true;
728+
}
729+
if count > 7 { // stuck in a loop, use original flow_dir
730+
new_val = flow_dir.get_value(row, col);
731+
resolved = true;
732+
}
733+
dir = new_val;
734+
count += 1;
735+
}
736+
flow_dir.set_value(row, col, new_val);
737+
}
738+
}
739+
if verbose {
740+
progress = (100.0_f64 * row as f64 / (rows - 1) as f64) as usize;
741+
if progress != old_progress {
742+
println!("Correcting flow direction: {}%", progress);
743+
old_progress = progress;
744+
}
745+
}
746+
}
747+
}
748+
614749
let flow_dir = Arc::new(flow_dir);
615750
let mut num_inflowing: Array2D<i8> = Array2D::new(rows, columns, -1, -1)?;
616751

0 commit comments

Comments
 (0)