Commit

fix conflict
OldLipe committed Jul 23, 2024
2 parents 9109a7f + 95c7b15 commit 9f4768f
Showing 5 changed files with 149 additions and 13 deletions.
4 changes: 2 additions & 2 deletions R/api_data.R
@@ -113,8 +113,8 @@
tidyr::drop_na()
# checking samples consistency
.data_check(ts_tbl_size, nrow(ts_tbl))
- # add base class
- class(ts_tbl) <- c("sits_base", class(ts_tbl))
+ # add base class (`sits` is added as it is removed in the join above)
+ class(ts_tbl) <- unique(c("sits_base", "sits", class(ts_tbl)))
}
return(ts_tbl)
}
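
Note on the change above — a minimal sketch of why "sits" is put back (assumption: the tibbles below are illustrative stand-ins, not sits internals). Joining a sub-classed tibble with dplyr can hand back an object without the custom class, so the class vector is rebuilt afterwards; unique() keeps it free of duplicates when "sits" did survive the join.

# a tibble carrying an extra "sits"-style class (illustrative only)
ts_tbl <- tibble::tibble(id = 1:3, value = c(0.1, 0.2, 0.3))
class(ts_tbl) <- c("sits", class(ts_tbl))
labels <- tibble::tibble(id = 1:3, label = "Forest")
joined <- dplyr::inner_join(ts_tbl, labels, by = "id")
class(joined)  # the custom class may have been dropped by the join
# rebuild the class vector; unique() avoids duplicating "sits" if it survived
class(joined) <- unique(c("sits_base", "sits", class(joined)))
class(joined)  # "sits_base" "sits" "tbl_df" "tbl" "data.frame"
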
2 changes: 1 addition & 1 deletion R/api_request.R
@@ -105,7 +105,7 @@
#' @param header A named list with values to be passed in headers.
#'
#' @return A request object returned by the requisition package.
- .request_headers <- function(req_obj, ...) {
+ .request_headers <- function(req_obj, header) {
# check package
pkg_class <- .request_check_package()

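
The change above replaces the `...` placeholder with an explicit `header` argument, so the signature matches the documented @param header. A hypothetical call site (assumption: a plain httr2 request is used here as a stand-in for the package's internal request object):

# stand-in request object (assumption); sits builds its own via its request API
req_obj <- httr2::request("https://example.org/stac")
# headers are passed as a named list, as documented for `header`
req_obj <- .request_headers(req_obj, header = list(
  "Content-Type" = "application/json"
))
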
3 changes: 2 additions & 1 deletion R/api_request_httr2.R
@@ -124,7 +124,8 @@
"Accept" = "*/*",
"Connection" = "keep-alive"
)
- header_values <- modifyList(
+ header_values <- utils::modifyList(
x = header,
val = default_value
)
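
utils::modifyList() — now called with an explicit namespace — merges two named lists, with entries from `val` replacing or extending those in `x`; given the arguments above, the defaults are therefore applied on top of the user-supplied headers. A small standalone illustration:

header <- list("Content-Type" = "application/json", "Accept" = "text/html")
default_value <- list(
  "Accept" = "*/*",
  "Connection" = "keep-alive"
)
header_values <- utils::modifyList(x = header, val = default_value)
# "Content-Type" is kept from `header`, "Accept" is overridden by the default,
# and "Connection" is added
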
10 changes: 5 additions & 5 deletions R/api_s2tile.R
@@ -18,15 +18,15 @@
} else {
# create a sf of points
epsg_lst <- unique(s2_tb[["epsg"]])
- points_sf <- sf::st_cast(.map_dfr(epsg_lst, function(epsg) {
+ points_sf <- sf::st_as_sf(.map_dfr(epsg_lst, function(epsg) {
tiles <- dplyr::filter(s2_tb, epsg == {{epsg}})
sfc <- matrix(c(tiles[["xmin"]], tiles[["ymin"]]), ncol = 2) |>
sf::st_multipoint(dim = "XY") |>
sf::st_sfc(crs = epsg) |>
sf::st_transform(crs = "EPSG:4326")
sf::st_sf(geom = sfc)
- }), "POINT")
+ }))
+ points_sf <- sf::st_cast(points_sf, "POINT")
# change roi to 1.5 degree to west and south
roi_search <- .bbox_as_sf(
dplyr::mutate(
@@ -58,7 +58,7 @@
})

# transform each sf to WGS84 and merge them into a single one sf object
- s2_tiles <- .map_dfr(s2_sf_lst, function(s2_sf) {
+ s2_tiles <- sf::st_as_sf(.map_dfr(s2_sf_lst, function(s2_sf) {
s2_sf <- sf::st_as_sf(
x = s2_sf,
sf_column_name = "geom",
@@ -68,7 +68,7 @@
x = sf::st_segmentize(s2_sf, 10980),
crs = "EPSG:4326"
)
- })
+ }))

# if roi is given, filter tiles by desired roi
if (.has(roi))
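
A minimal sketch of why the row-bound results are now wrapped in sf::st_as_sf() before sf::st_cast() (assumption: .map_dfr() binds rows much like dplyr::bind_rows(), which can return a plain data frame even when every piece is an sf object):

library(sf)
# emulate a row-bound result: a data frame holding an sfc geometry column
# but lacking the "sf" class
geoms <- st_sfc(
  st_multipoint(rbind(c(0, 0), c(1, 1))),
  st_multipoint(rbind(c(2, 2), c(3, 3))),
  crs = "EPSG:4326"
)
merged <- data.frame(epsg = c(32721, 32722))
merged$geom <- geoms
merged_sf <- sf::st_as_sf(merged)  # restore the "sf" class from the geometry column
points_sf <- suppressWarnings(     # MULTIPOINT -> POINT repeats attributes per point
  sf::st_cast(merged_sf, "POINT")
)
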
143 changes: 139 additions & 4 deletions tests/testthat/test-data.R
@@ -14,6 +14,11 @@ test_that("Reading a LAT/LONG from RASTER", {
expect_equal(names(point_ndvi)[1], "longitude")
expect_true(ncol(.tibble_time_series(point_ndvi)) == 2)
expect_true(length(sits_timeline(point_ndvi)) == 12)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(point_ndvi)
)
)

samples2 <- tibble::tibble(longitude = -55.66738, latitude = 11.76990)
expect_warning(
@@ -49,6 +54,11 @@ test_that("Reading a CSV file from RASTER", {
expect_equal(length(names(points_poly)), 7)
expect_true(ncol(.tibble_time_series(points_poly)) == 2)
expect_true(length(sits_timeline(points_poly)) == 12)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(points_poly)
)
)

Sys.setenv("SITS_SAMPLES_CACHE_DIR" = tempdir())

@@ -63,6 +73,12 @@ test_that("Reading a CSV file from RASTER", {
expect_equal(length(names(points_df)), 7)
expect_true(ncol(.tibble_time_series(points_df)) == 2)
expect_true(length(sits_timeline(points_df)) == 12)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(points_df)
)
)

Sys.unsetenv("SITS_SAMPLES_CACHE_DIR")
})

@@ -113,6 +129,11 @@ test_that("Retrieving points from BDC using POLYGON shapefiles", {
object = unique(points_shp[["end_date"]]),
expected = as.Date(cube_timeline[length(cube_timeline)])
)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(points_shp)
)
)

# test bounding box
polygons_bbox <- .bbox(sf_mt)
@@ -181,7 +202,7 @@ test_that("Retrieving points from BDC using POLYGON shapefiles", {
)
})

- test_that("Retrieving points from MPC using POINT shapefiles", {
+ test_that("Retrieving points from BDC using POINT shapefiles", {
shp_file <- system.file(
"extdata/shapefiles/cerrado/cerrado_forested.shp",
package = "sits"
@@ -226,6 +247,12 @@ test_that("Retrieving points from MPC using POINT shapefiles", {
object = unique(points_cf[["end_date"]]),
expected = as.Date(cube_timeline[length(cube_timeline)])
)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(points_cf)
)
)

points_bbox <- .bbox(sf_cf)

points_in_bbox <- dplyr::filter(
@@ -237,7 +264,7 @@
)
})

- test_that("Retrieving points from MPC using sits tibble", {
+ test_that("Retrieving points from BDC using sits tibble", {
cube_bbox <- sits_bbox(cerrado_2classes)
# create a raster cube file based on the bbox of the sits tibble
modis_cube <- .try(
@@ -282,6 +309,11 @@ test_that("Retrieving points from MPC using sits tibble", {
object = unique(points_tb[["end_date"]]),
expected = as.Date(cube_timeline[length(cube_timeline)])
)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(points_tb)
)
)
})

test_that("Retrieving points from BDC using sf objects", {
@@ -329,6 +361,11 @@ test_that("Retrieving points from BDC using sf objects", {
object = unique(points_cf[["end_date"]]),
expected = as.Date(cube_timeline[length(cube_timeline)])
)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(points_cf)
)
)

points_bbox <- .bbox(sf_cf)

@@ -385,10 +422,14 @@ test_that("Retrieving points from BDC using sf objects", {
object = unique(points_poly[["end_date"]]),
expected = as.Date(cube_timeline[length(cube_timeline)])
)
expect_true(
all(
c("sits", "tbl_df", "tbl", "data.frame") %in% class(points_poly)
)
)

# test bounding box
polygons_bbox <- .bbox(sf_mt)

points_poly_in_bbox <- dplyr::filter(
points_poly,
.data[["longitude"]] >= polygons_bbox[["xmin"]],
Expand All @@ -400,6 +441,89 @@ test_that("Retrieving points from BDC using sf objects", {
expect_true(nrow(points_poly_in_bbox) == nrow(points_poly))
})

test_that("Retrieving points from MPC Base Cube", {
regdir <- paste0(tempdir(), "/base_cube_reg/")
if (!dir.exists(regdir)) {
suppressWarnings(dir.create(regdir))
}
# define roi
roi <- list(
lon_min = -55.69004,
lon_max = -55.62223,
lat_min = -11.78788,
lat_max = -11.73343
)
# load sentinel-2 cube
s2_cube <- sits_cube(
source = "MPC",
collection = "SENTINEL-2-L2A",
start_date = "2019-01-01",
end_date = "2019-01-20",
bands = c("B05"),
tiles = "21LXH",
progress = FALSE
)
s2_cube <- suppressWarnings(sits_regularize(
cube = s2_cube,
period = "P16D",
res = 320,
multicores = 1,
tiles = "21LXH",
output_dir = regdir,
progress = FALSE
))
# load dem cube
dem_cube <- sits_cube(
source = "MPC",
collection = "COP-DEM-GLO-30",
tiles = "21LXH"
)
dem_cube <- sits_regularize(
cube = dem_cube,
multicores = 1,
res = 232,
tiles = "21LXH",
output_dir = regdir
)
# create base cube
base_cube <- sits_add_base_cube(s2_cube, dem_cube)
# load samples
samples <- read.csv(
system.file("extdata/samples/samples_sinop_crop.csv", package = "sits")
)
# edit samples to work with the cube (test purposes only)
samples[["start_date"]] <- "2019-01-02"
samples[["end_date"]] <- "2019-01-02"
# extract data
samples_ts <- sits_get_data(
base_cube,
samples = samples,
crs = 32721,
multicores = 1
)
# validations
cube_timeline <- sits_timeline(base_cube)
expect_equal(object = nrow(samples_ts), expected = 18)
expect_equal(
object = unique(samples_ts[["start_date"]]),
expected = as.Date(cube_timeline[1])
)
expect_equal(
object = unique(samples_ts[["end_date"]]),
expected = as.Date(cube_timeline[length(cube_timeline)])
)
expect_true(
all(
c("sits_base", "sits", "tbl_df", "tbl", "data.frame") %in%
class(samples_ts)
)
)

unlink(s2_cube[["file_info"]][[1]]$path)
unlink(dem_cube[["file_info"]][[1]]$path)
unlink(base_cube[["file_info"]][[1]]$path)
})

test_that("Reading metadata from CSV file", {
csv_file <- paste0(tempdir(), "/cerrado_2classes.csv")
sits_to_csv(cerrado_2classes, file = csv_file)
@@ -476,7 +600,12 @@ test_that("Reading data from Classified data", {
expect_equal(
nrow(points_poly), nrow(read.csv(csv_raster_file))
)

expect_true(
all(
c("predicted", "sits", "tbl_df", "tbl", "data.frame") %in%
class(points_poly)
)
)
expect_equal(
colnames(points_poly), c(
"longitude", "latitude",
@@ -545,4 +674,10 @@ test_that("Reading data from Classified data from STAC", {
"label", "cube", "predicted"
)
)
expect_true(
all(
c("predicted", "sits", "tbl_df", "tbl", "data.frame") %in%
class(points_poly)
)
)
})
