This repository has been archived by the owner on May 14, 2024. It is now read-only.

Commit: Lint package
adamhsparks committed Mar 29, 2021
1 parent 32c6844 commit 5a64ea5
Showing 16 changed files with 116 additions and 180 deletions.
R/get_ag_bulletin.R: 16 changes (0 additions, 16 deletions)
@@ -90,7 +90,6 @@ get_ag_bulletin <- function(state = "AUS") {
xml_url <- .create_bom_file(AUS_XML,
.the_state = cleaned_state,
.file_loc = file_loc)

bulletin_out <- .parse_bulletin(xml_url)
if (is.null(bulletin_out)) {
return(invisible(NULL))
@@ -110,7 +109,6 @@ get_ag_bulletin <- function(state = "AUS") {
# CRAN NOTE avoidance
stations_site_list <-
site <- obs_time_local <- obs_time_utc <- r <- .SD <- NULL # nocov

# load the XML from ftp
if (substr(xml_url, 1, 3) == "ftp") {
xml_object <- .get_url(remote_file = xml_url)
@@ -120,17 +118,14 @@ get_ag_bulletin <- function(state = "AUS") {
} else {# load the XML from local
xml_object <- xml2::read_xml(xml_url)
}

# get definitions (and all possible value fields to check against)
definition_attrs <- xml2::xml_find_all(xml_object, "//data-def")
definition_attrs <- xml2::xml_attrs(definition_attrs)
definition_attrs <-
lapply(definition_attrs, function(x)
x[[1]][[1]])

# get the actual observations and create a data table
observations <- xml2::xml_find_all(xml_object, ".//d")

out <- data.table::data.table(
obs_time_local = xml2::xml_find_first(observations, ".//ancestor::obs") %>%
xml2::xml_attr("obs-time-local"),
@@ -148,24 +143,20 @@ get_ag_bulletin <- function(state = "AUS") {
1,
nchar(basename(xml_url)) - 4)
)

out <- data.table::dcast(
out,
product_id + obs_time_local + obs_time_utc + time_zone + site + station ~
observation,
value.var = "values"
)

# check that all fields are present, if not add missing col with NAs
missing <-
setdiff(unlist(definition_attrs), names(out[, -c(1:5)]))
if (length(missing) != 0) {
out[, eval(missing) := NA]
}

# remove leading 0 to merge with stations_site_list
out[, site := gsub("^0{1,2}", "", out$site)]

# merge with AAC codes
# load AAC code/town name list to join with final output
load(system.file("extdata", "stations_site_list.rda", # nocov
@@ -174,7 +165,6 @@ get_ag_bulletin <- function(state = "AUS") {
data.table::setkey(stations_site_list, "site")
data.table::setkey(out, "site")
out <- stations_site_list[out, on = "site"]

# tidy up the cols
refcols <- c(
"product_id",
@@ -208,13 +198,11 @@ get_ag_bulletin <- function(state = "AUS") {
"t1m",
"wr"
)

# set col classes
# factor
out[, c(1:3, 11:12) := lapply(.SD, function(x)
as.factor(x)),
.SDcols = c(1:3, 11:12)]

# dates
out[, obs_time_local := gsub("T", " ", obs_time_local)]
out[, obs_time_utc := gsub("T", " ", obs_time_utc)]
@@ -223,16 +211,12 @@ get_ag_bulletin <- function(state = "AUS") {
origin = "1970-1-1",
format = "%Y%m%d %H%M")),
.SDcols = c(13:14)]

# set "Tce" to 0.01
out[, r := gsub("Tce", "0.01", r)]

# set numeric cols
out[, c(4:7, 9:10, 17:30) := lapply(.SD, as.numeric),
.SDcols = c(4:7, 9:10, 17:30)]

data.table::setcolorder(out, refcols)

# return from main function
return(out)
}
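For context on the reshape above: dcast() pivots the long site/observation/values table into one column per observation type, and any bulletin fields that did not appear get added as NA columns. A minimal sketch with invented values (the field list is an assumption for illustration):

library(data.table)

# toy stand-in for the parsed XML observations (values invented)
long <- data.table(
  site = c("1", "1", "2"),
  observation = c("tn", "tx", "tn"),
  values = c(12.1, 24.3, 10.5)
)

# long -> wide: one column per observation type
wide <- dcast(long, site ~ observation, value.var = "values")

# backfill fields absent from this bulletin with NA, mirroring the
# setdiff() check against definition_attrs in .parse_bulletin()
definition_attrs <- c("tn", "tx", "r")  # assumed field list for the sketch
missing <- setdiff(definition_attrs, names(wide))
if (length(missing) != 0) {
  wide[, eval(missing) := NA]
}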
R/get_coastal_forecast.R: 34 changes (0 additions, 34 deletions)
@@ -55,7 +55,6 @@
get_coastal_forecast <- function(state = "AUS") {
# this is just a placeholder for functionality with parse_coastal_forecast()
filepath <- NULL

# see internal_functions.R for these functions
the_state <- .check_states(state)
location <- .validate_filepath(filepath)
@@ -87,7 +86,6 @@ get_coastal_forecast <- function(state = "AUS") {
xml_url <- .create_bom_file(AUS_XML,
.the_state = cleaned_state,
.file_loc = file_loc)

coastal_out <- .parse_coastal_forecast(xml_url)
if (is.null(coastal_out)) {
return(invisible(NULL))
@@ -112,7 +110,6 @@ get_coastal_forecast <- function(state = "AUS") {
state_code <-
tropical_system_location <-
forecast_waves <- .SD <- AAC_codes <- NULL # nocov end

# load the XML from ftp
if (substr(xml_url, 1, 3) == "ftp") {
xml_object <- .get_url(xml_url)
@@ -123,50 +120,37 @@ get_coastal_forecast <- function(state = "AUS") {
# load the XML from local
xml_object <- xml2::read_xml(xml_url)
}

out <- .parse_coastal_xml(xml_object)

# clean up and split out time cols into offset and remove extra chars
.split_time_cols(x = out)

# merge with aac codes for location information
load(system.file("extdata",
"marine_AAC_codes.rda",
package = "bomrang")) # nocov
data.table::setkey(out, "aac")
out <- marine_AAC_codes[out, on = c("aac", "dist_name")]

# add state field
out[, state_code := gsub("_.*", "", out$aac)]

# return final forecast object

# add product ID field
out[, product_id := substr(basename(xml_url),
1,
nchar(basename(xml_url)) - 4)]

# some fields only come out on special occasions, if absent, add as NA
if (!"forecast_swell2" %in% colnames(out)) {
out[, forecast_swell2 := NA]
}

if (!"forecast_caution" %in% colnames(out)) {
out[, forecast_caution := NA]
}

if (!"marine_forecast" %in% colnames(out)) {
out[, marine_forecast := NA]
}

if (!"tropical_system_location" %in% colnames(out)) {
out[, tropical_system_location := NA]
}

if (!"forecast_waves" %in% colnames(out)) {
out[, forecast_waves := NA]
}

# reorder columns
refcols <- c(
"index",
@@ -192,21 +176,17 @@ get_coastal_forecast <- function(state = "AUS") {
"tropical_system_location",
"forecast_waves"
)

data.table::setcolorder(out, refcols)

# set col classes
# factors
out[, c(1, 11) := lapply(.SD, function(x)
as.factor(x)),
.SDcols = c(1, 11)]

out[, c(9:10) := lapply(.SD, function(x)
as.POSIXct(x,
origin = "1970-1-1",
format = "%Y-%m-%d %H:%M:%OS")),
.SDcols = c(9:10)]

out[, c(12:13) := lapply(.SD, function(x)
as.POSIXct(
x,
@@ -215,12 +195,10 @@ get_coastal_forecast <- function(state = "AUS") {
tz = "GMT"
)),
.SDcols = c(12:13)]

# character
out[, c(6:8, 14:20) := lapply(.SD, function(x)
as.character(x)),
.SDcols = c(6:8, 14:20)]

return(out)
}
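The column-class block above leans on one data.table idiom throughout: convert a block of columns in place with dt[, cols := lapply(.SD, f), .SDcols = cols]. A toy illustration (columns and positions invented):

library(data.table)

dt <- data.table(a = "1", b = "x", c = "3")
num_cols <- c(1, 3)  # positions to convert, like the c(9:10) blocks above
dt[, (num_cols) := lapply(.SD, as.numeric), .SDcols = num_cols]
str(dt)  # a and c are now numeric; b is untouched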

@@ -238,11 +216,9 @@ get_coastal_forecast <- function(state = "AUS") {
forecast_waves <- synoptic_situation <- # nocov start
preamble <- warning_summary_footer <- product_footer <-
postamble <- NULL # nocov end

# get the actual forecast objects
meta <- xml2::xml_find_all(xml_object, ".//text")
fp <- xml2::xml_find_all(xml_object, ".//forecast-period")

locations_index <- data.table::data.table(
# find all the aacs
aac = xml2::xml_parent(meta) %>%
@@ -269,7 +245,6 @@ get_coastal_forecast <- function(state = "AUS") {
xml2::xml_find_first(".//parent::forecast-period") %>%
xml2::xml_attr("start-time-local")
)

vals <- lapply(fp, function(node) {
# find names of all children nodes
childnodes <- node %>%
@@ -281,40 +256,31 @@ get_coastal_forecast <- function(state = "AUS") {
xml2::xml_attr("type")
# create columns names based on either node name or attr value
names <- ifelse(is.na(names), childnodes, names)

# find all values
values <- node %>%
xml2::xml_children() %>%
xml2::xml_text()

# create data frame and properly label the columns
df <- data.frame(t(values), stringsAsFactors = FALSE)
names(df) <- names
df
})

vals <- data.table::rbindlist(vals, fill = TRUE)
sub_out <- cbind(locations_index, vals)

if ("synoptic_situation" %in% names(sub_out)) {
sub_out[, synoptic_situation := NULL]
}

if ("preamble" %in% names(sub_out)) {
sub_out[, preamble := NULL]
}

if ("warning_summary_footer" %in% names(sub_out)) {
sub_out[, warning_summary_footer := NULL]
}

if ("product_footer" %in% names(sub_out)) {
sub_out[, product_footer := NULL]
}

if ("postamble" %in% names(sub_out)) {
sub_out[, postamble := NULL]
}

return(sub_out)
}
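The lapply() over forecast-period nodes above is the heart of the parser: each node becomes a one-row data frame whose column names come from each child's "type" attribute, falling back to the node name. A runnable sketch against an invented XML snippet:

library(xml2)
library(data.table)

xml_object <- read_xml(
  '<product>
    <forecast-period><text type="forecast_winds">NW 15 kt</text></forecast-period>
    <forecast-period><text type="forecast_seas">1 to 1.5 m</text></forecast-period>
  </product>')

fp <- xml_find_all(xml_object, ".//forecast-period")

vals <- lapply(fp, function(node) {
  kids <- xml_children(node)
  nms <- xml_attr(kids, "type")                  # prefer the type attribute
  nms <- ifelse(is.na(nms), xml_name(kids), nms) # fall back to the node name
  df <- data.frame(t(xml_text(kids)), stringsAsFactors = FALSE)
  names(df) <- nms
  df
})

rbindlist(vals, fill = TRUE)  # fill = TRUE pads missing fields with NA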
R/get_current_weather.R: 2 changes (1 addition, 1 deletion)
@@ -74,7 +74,7 @@ get_current_weather <-
strict = FALSE,
latlon = NULL,
emit_latlon_msg = TRUE) {
- JSONurl_site_list <- name <- .SD <- na_if <- NULL
+ JSONurl_site_list <- name <- .SD <- NULL

# Load JSON URL list
load(system.file("extdata", "JSONurl_site_list.rda", # nocov start
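The single change here drops na_if from the NULL bindings, apparently because nothing references it any longer. The pattern itself is CRAN NOTE avoidance: R CMD check reports "no visible binding for global variable" for column names used via data.table's non-standard evaluation, so functions pre-bind them to NULL. A minimal sketch with an invented function and columns:

library(data.table)

lookup_site <- function(dt) {
  name <- .SD <- NULL  # CRAN NOTE avoidance: silences "no visible binding"
  dt[name != "", lapply(.SD, as.character)]
}

lookup_site(data.table(name = c("A", ""), value = 1:2))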
R/get_historical_weather.R: 7 changes (4 additions, 3 deletions)
@@ -176,7 +176,8 @@ get_historical_weather <- get_historical <-
"quality"),
solar = c("solar_exposure")
))
- dat[["station_number"]] <- sprintf("%06d", as.integer(dat[["station_number"]]))
+ dat[["station_number"]] <- sprintf("%06d",
+                                    as.integer(dat[["station_number"]]))

return(
structure(
@@ -259,8 +260,8 @@ get_historical <- get_historical_weather
# read the station list in as a vector first so that we can
# detect and remove the header and footer...
ncc <- readLines(weather[i])
- header_start <- grep('^\\-+$', ncc) + 1L
- footer_start <- grep('^[0-9]+ stations', ncc) - 1L
+ header_start <- grep("^\\-+$", ncc) + 1L
+ footer_start <- grep("^[0-9]+ stations", ncc) - 1L

if (length(header_start > 0) && length(footer_start > 0)) {
# ... then process it as a data frame
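The grep() pair above locates the dashed rule that closes the header and the "NNN stations" line that opens the footer, so only the station rows survive. A sketch against an invented listing (the real one comes from readLines(weather[i])):

ncc <- c("Bureau of Meteorology station list",
         "------------------------------",
         "001000 Station A",
         "002000 Station B",
         "2 stations found.")

header_start <- grep("^\\-+$", ncc) + 1L            # first line after the dashes
footer_start <- grep("^[0-9]+ stations", ncc) - 1L  # last line before the footer

if (length(header_start) > 0 && length(footer_start) > 0) {
  ncc <- ncc[header_start:footer_start]
}

# zero-pad the station numbers to six digits, as the sprintf() change above does
sprintf("%06d", as.integer(substr(ncc, 1, 6)))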
R/get_precis_forecast.R: 4 changes (0 additions, 4 deletions)
@@ -75,9 +75,6 @@ get_precis_forecast <- function(state = "AUS") {
#'
#' @noRd
.return_precis <- function(file_loc, cleaned_state) {

product_id <- probability_of_precipitation <- lower_precipitation_limit <- NULL

# create vector of XML files
AUS_XML <- c(
"IDN11060.xml",
@@ -98,7 +95,6 @@ get_precis_forecast <- function(state = "AUS") {
xml_url <- .create_bom_file(AUS_XML,
.the_state = cleaned_state,
.file_loc = file_loc)

precis_out <- .parse_precis_forecast(xml_url)
if (is.null(precis_out)) {
return(invisible(NULL))
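The is.null() early return above (and in the other get_*() functions) is the package's graceful-failure pattern for remote resources: if fetching or parsing fails, hand back NULL and let the caller return it invisibly rather than error. A minimal sketch with a hypothetical fetcher:

fetch_or_null <- function(xml_url) {
  out <- tryCatch(xml2::read_xml(xml_url), error = function(e) NULL)
  if (is.null(out)) {
    return(invisible(NULL))  # fail quietly, e.g. when the FTP server is down
  }
  out
}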
R/get_radar_imagery.R: 24 changes (12 additions, 12 deletions)
@@ -36,11 +36,12 @@ get_available_radar <- function(radar_id = "all") {
radar_locations <- NULL #nocov
load(system.file("extdata", "radar_locations.rda", package = "bomrang"))
list_files <- curl::new_handle()
- curl::handle_setopt(handle = list_files,
-                     FTP_RESPONSE_TIMEOUT = 200000,
-                     CONNECTTIMEOUT = 90,
-                     ftp_use_epsv = TRUE,
-                     dirlistonly = TRUE
+ curl::handle_setopt(
+   handle = list_files,
+   FTP_RESPONSE_TIMEOUT = 200000,
+   CONNECTTIMEOUT = 90,
+   ftp_use_epsv = TRUE,
+   dirlistonly = TRUE
)
con <- curl::curl(url = ftp_base, "r", handle = list_files)
files <- readLines(con)
@@ -65,7 +66,7 @@ get_available_radar <- function(radar_id = "all") {
if (radar_id[1] == "all") {
dat <- dat
} else if (is.numeric(radar_id) && radar_id %in% dat$Radar_id) {
- dat <- dat[dat$Radar_id %in% radar_id, ]
+ dat <- dat[dat$Radar_id %in% radar_id,]
} else{
stop("radar_id not found")
}
@@ -129,10 +130,10 @@ get_radar_imagery <- get_radar <-
call. = FALSE
)
}

ftp_base <- "ftp://ftp.bom.gov.au/anon/gen/radar"
fp <- file.path(ftp_base, paste0(product_id, ".gif"))

if (is.null(path)) {
path <- tempfile(fileext = ".gif", tmpdir = tempdir())
}
@@ -167,10 +168,9 @@ get_radar_imagery <- get_radar <-
},
error = function() {
return(magick::image_read(
- path =
-   system.file("error_images",
-               "image_error_message.png",
-               package = "bomrang")
+ path = system.file("error_images",
+                    "image_error_message.png",
+                    package = "bomrang")
))
})
}
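For reference, the curl handle reformatted above performs a bare FTP directory listing: dirlistonly = TRUE asks for file names only, which readLines() then pulls off the connection. A sketch of that flow (the trailing slash on the URL is an assumption, since libcurl needs it to treat the path as a directory):

library(curl)

ftp_base <- "ftp://ftp.bom.gov.au/anon/gen/radar/"
list_files <- new_handle()
handle_setopt(handle = list_files,
              FTP_RESPONSE_TIMEOUT = 200000,
              CONNECTTIMEOUT = 90,
              ftp_use_epsv = TRUE,
              dirlistonly = TRUE)
con <- curl(url = ftp_base, "r", handle = list_files)
files <- readLines(con)
close(con)
head(files)  # radar product GIFs, e.g. "IDR022.gif" (illustrative)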
R/get_satellite_imagery.R: 7 changes (4 additions, 3 deletions)
@@ -66,9 +66,10 @@ get_available_imagery <- function(product_id = "all") {
#'
#' Fetch \acronym{BOM} satellite GeoTIFF imagery from
#' \url{ftp://ftp.bom.gov.au/anon/gen/gms/} and return a raster
- #' \code{\link[terra]{SpatRaster}} object of 'GeoTIFF' files. Files are available at
- #' ten minute update frequency with a 24 hour delete time. Suggested to check
- #' file availability first by using \code{\link{get_available_imagery}}.
+ #' \code{\link[terra]{SpatRaster}} object of 'GeoTIFF' files. Files are
+ #' available at ten minutes update frequency with a 24 hour delete time.
+ #' It is suggested to check file availability first by using
+ #' \code{\link{get_available_imagery}}.
#'
#' @param product_id Character. \acronym{BOM} product ID to download in
#' 'GeoTIFF' format and import as a \code{\link[terra]{SpatRaster}} object. A
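The revised docs above return imagery as a terra SpatRaster. A sketch of the fetch-and-import those docs describe, with an invented file name (list real ones with get_available_imagery()):

library(terra)

# file name below is illustrative only
src <- "ftp://ftp.bom.gov.au/anon/gen/gms/IDE00425.202103290000.tif"
tif <- tempfile(fileext = ".tif")
utils::download.file(src, destfile = tif, mode = "wb")

r <- rast(tif)  # import the GeoTIFF as a SpatRaster
r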