81
81
# ' @param area_buffer desired buffer around the provided shapefile (in meters, default 1500)
82
82
# ' @param grid_resolution (integer) desired resolution of downloaded grid in kilometers. (see help page for more details)
83
83
# ' @param preview generate graphs showing previews of data download? (boolean)
84
+ # ' @param ncdf Set this parameter to be `TRUE` if you would like the downloaded files to remain in NCDF format (*.nc).
85
+ # ' @param continue if `ncdf` is `TRUE`, then you can pass a folder name (in passed directory) here to continue an aborted download session.
84
86
# ' @importFrom abind abind
85
87
# ' @importFrom dplyr nth mutate %>% tibble
86
- # ' @importFrom lubridate year month day hour
88
+ # ' @importFrom lubridate year month day hour as_datetime
87
89
# ' @importFrom mapview mapview
88
90
# ' @importFrom ncdf4 nc_open ncvar_get nc_close
89
91
# ' @importFrom purrr map
@@ -123,43 +125,11 @@ get_metno_reanalysis3 <-
123
125
mn_variables = NULL ,
124
126
area_buffer = 1500 ,
125
127
grid_resolution = NULL ,
126
- preview = TRUE
128
+ preview = TRUE ,
129
+ ncdf = FALSE ,
130
+ continue = NULL
127
131
){
128
132
129
- # supporting functions ----
130
- nc_open_retry <- function (link ) {
131
-
132
- nc_file <- tryCatch(expr = {ncdf4 :: nc_open(link )},
133
- error = function (cond ){
134
- warning(" failed.." )
135
- return (NA )
136
- })
137
-
138
- if (nc_file %> % length() > 1 ){
139
- return (nc_file )
140
- } else {
141
- print(" retry download.." )
142
- attempt = 1
143
- while ((attempt < 10 ) & (length(nc_file ) == 1 )){
144
- Sys.sleep(5 )
145
- attempt = attempt + 1
146
- nc_file <- tryCatch(expr = {ncdf4 :: nc_open(link )},
147
- error = function (cond ){
148
- warning(" failed.." , cond , " retry!" )
149
- return (NA )
150
- })
151
-
152
- }
153
-
154
- if (length(nc_file ) > 1 ){
155
- print(" connection re-established!" )
156
- return (nc_file )
157
- }else {
158
- stop(" download failed after 10 attempts." )
159
- }
160
- }
161
- }
162
-
163
133
get_coord_window <- function (area_path , area_buffer , preview ){
164
134
165
135
# get a base file to find the right x y
@@ -349,7 +319,7 @@ get_metno_reanalysis3 <-
349
319
var_q )
350
320
351
321
# create the daterange
352
- daterange <- seq(as.POSIXct (fromdate , tz = " CET " ), as.POSIXct (todate , tz = " CET " ), by = " hour" )
322
+ daterange <- seq(as_datetime (fromdate ), as_datetime (todate ), by = " hour" )
353
323
years <- lubridate :: year(daterange )
354
324
months <- lubridate :: month(daterange ) %> % stringr :: str_pad(width = 2 , side = " left" , pad = " 0" )
355
325
days <- lubridate :: day(daterange ) %> % stringr :: str_pad(width = 2 , side = " left" , pad = " 0" )
@@ -410,20 +380,35 @@ get_metno_reanalysis3 <-
410
380
}
411
381
412
382
download_ncfiles <- function (directory , foldername , full_urls , filenames ,
413
- years , mn_variables , geometry_type ) {
414
-
415
- # download batches per year
416
- yearbatch <- split(full_urls , f = years )
417
- filebatch <- split(filenames , f = years )
383
+ years , mn_variables , geometry_type ,
384
+ ncdf = FALSE , verbose = FALSE ) {
385
+
386
+ # ## This is where the switch to netcdf download should take place
387
+ # ## if the user opts for it! (ncdf4)
388
+ if (ncdf ){
389
+ savefiles = paste(directory , foldername , filenames , sep = " /" )
390
+ read_write_ncdf(url = full_urls , savefiles = savefiles ,
391
+ foldername = foldername , directory = directory ,
392
+ verbose = preview )
393
+ return (directory )
394
+ }
418
395
419
- # set list names
420
- years_string <- years %> % unique() %> % sort()
421
- names(yearbatch ) <- paste0(" y" , years_string )
396
+ # else: continue as normal
397
+ # download batches per year
398
+ yearbatch <- split(full_urls , f = years )
399
+ filebatch <- split(filenames , f = years )
400
+ # set list names
401
+ years_string <- years %> % unique() %> % sort()
402
+ names(yearbatch ) <- paste0(" y" , years_string )
422
403
423
404
for (cbyear in names(yearbatch )) {
424
405
print(paste0(" downloading: " , cbyear ))
425
406
url <- yearbatch [[cbyear ]]
426
407
408
+
409
+
410
+
411
+
427
412
ncin_crop <- nc_open_retry(url [1 ])
428
413
# pre-download first frame to get dimensions set
429
414
@@ -684,7 +669,8 @@ get_metno_reanalysis3 <-
684
669
if (grid_resolution < 1 ){stop(" `grid_resolution` must be greater than 1 km" )}
685
670
}
686
671
687
- if (preview == TRUE ){verbose = TRUE }
672
+ # this is truly crap, should fix..
673
+ if (preview == TRUE ){verbose = TRUE }else {verbose = FALSE }
688
674
689
675
if (directory %> % is.null()){
690
676
directory <- getwd()
@@ -720,8 +706,14 @@ get_metno_reanalysis3 <-
720
706
print(" building query.." )
721
707
queries <- build_query(bounding_coords , mn_variables , fromdate , todate , grid_resolution , verbose )
722
708
723
- print(" creating download folder.." )
724
- foldername <- create_download_folder(directory )
709
+ if (is.null(continue ) == FALSE ){
710
+ print(" continuing download folder.." )
711
+ foldername = continue
712
+ }else {
713
+ print(" creating download folder.." )
714
+ foldername <- create_download_folder(directory )
715
+
716
+ }
725
717
726
718
print(" starting download" )
727
719
if (bounding_coords %> % length() == 2 ){geometry_type = " point" }else {geometry_type = " polygon" }
@@ -734,9 +726,14 @@ get_metno_reanalysis3 <-
734
726
filenames = queries $ filenames ,
735
727
years = queries $ years ,
736
728
mn_variables = mn_variables ,
737
- geometry_type = geometry_type
729
+ geometry_type = geometry_type , ncdf = ncdf , verbose = preview
738
730
)
739
731
732
+ if (ncdf ){
733
+ if (preview ){cat(bold(green(" NCDF files finished downloading and are located here:" )), " \n " ,
734
+ blue(italic(underline(paste0(directory , " /" ,foldername )))), " \n " )}
735
+ return (paste0(directory , " /" ,foldername ))
736
+ }
740
737
print(" download complete!, merging files.." )
741
738
742
739
merged_data <- merge_rds(directory = directory ,
@@ -1269,3 +1266,57 @@ swat_weather_input_chain <-
1269
1266
1270
1267
print(" miljotools: pipeline finished!" )
1271
1268
}
1269
+
1270
# supporting functions ----

#' Open a remote NetCDF file, with fallbacks for flaky connections
#'
#' Tries `ncdf4::nc_open()` on `link`. On failure it first retries with the
#' longwave-radiation variable stripped from the comma-separated OPeNDAP
#' query (that variable is sometimes missing upstream, see
#' https://github.com/metno/NWPdocs/wiki/MET-Nordic-dataset#parameters),
#' and finally retries the original link up to 10 times with a 5 second
#' pause between attempts.
#'
#' @param link character(1). URL of the NetCDF resource (an OPeNDAP query
#'   with comma-separated variable names).
#' @return An open `ncdf4` connection (possibly missing the longwave
#'   radiation variable -- a warning is raised in that case). Errors after
#'   10 failed attempts.
nc_open_retry <- function(link) {

  # a successful nc_open() yields a multi-element ncdf4 object;
  # on error we return a length-1 NA sentinel instead
  try_open <- function(url) {
    tryCatch(
      expr = ncdf4::nc_open(url),
      error = function(cond) {
        warning("failed..", conditionMessage(cond), " retry!")
        NA
      }
    )
  }

  nc_file <- try_open(link)
  if (length(nc_file) > 1) {
    return(nc_file)
  }

  # fallback 1: drop the longwave radiation variable from the query and
  # try again without it
  mt_print(TRUE, "nc_open_retry", "retrying download without longwave radiation")
  parts <- link %>% stringr::str_split(",", simplify = TRUE)
  longwave_index <- grepl(x = parts, pattern = "longwave") %>% which()
  # guard against the empty-negative-index footgun: `parts[-integer(0)]`
  # selects NOTHING, which would collapse the link to an empty string --
  # only subset when a longwave variable is actually present
  if (length(longwave_index) > 0) {
    new_link <- paste(parts[-longwave_index], collapse = ",")
    new_nc_file <- try_open(new_link)
    if (length(new_nc_file) > 1) {
      mt_print(TRUE, "nc_open_retry", "download without longwave radiation succeeded!")
      warning("file missing longwave radiation")
      return(new_nc_file)
    }
  }

  # fallback 2: plain retries of the original link (up to 10 attempts,
  # 5 s apart) in case the failure was transient
  mt_print(TRUE, "nc_open_retry", "retrying download..")
  attempt <- 1
  while (attempt < 10 && length(nc_file) == 1) {
    Sys.sleep(5)
    attempt <- attempt + 1
    nc_file <- try_open(link)
  }

  if (length(nc_file) > 1) {
    mt_print(TRUE, "nc_open_retry", "connection re-established!")
    return(nc_file)
  }
  stop("download failed after 10 attempts.")
}
0 commit comments