diff --git a/R/mod_download_dataset.R b/R/mod_download_dataset.R
index 6f5e631..803b7ed 100644
--- a/R/mod_download_dataset.R
+++ b/R/mod_download_dataset.R
@@ -63,9 +63,10 @@ download_dataset_server <- function(id,
     rv$export_file_xlsx <- tryCatch({
-
+
       out.xlsx <- tempfile(fileext = ".xlsx")
-      DaparToolshed::write.excel(obj = dataIn(), filename = out.xlsx)
+      obj.cleaned <- DaparToolshed::CleanRowData(dataIn())
+      DaparToolshed::write.excel(obj = obj.cleaned, filename = out.xlsx)
       out.xlsx
     },
@@ -73,7 +74,6 @@ download_dataset_server <- function(id,
     error = function(e) e
     )
-
     rv$export_file_qf <- tryCatch({
       out.qf <- tempfile(fileext = ".qf")
       saveRDS(dataIn(), file = out.qf)
diff --git a/inst/workflow/PipelinePeptide/R/PipelinePeptide_Aggregation.R b/inst/workflow/PipelinePeptide/R/PipelinePeptide_Aggregation.R
index f12cc66..1d51ec9 100644
--- a/inst/workflow/PipelinePeptide/R/PipelinePeptide_Aggregation.R
+++ b/inst/workflow/PipelinePeptide/R/PipelinePeptide_Aggregation.R
@@ -524,6 +524,10 @@ PipelinePeptide_Aggregation_server <- function(id,
       dataOut$value <- NULL
       rv$steps.status['Aggregation'] <- stepStatus$VALIDATED
     } else {
+      dataOut$trigger <- MagellanNTK::Timestamp()
+      dataOut$value <- NULL
+      rv$steps.status['Aggregation'] <- stepStatus$VALIDATED
+
       #rv.custom$temp.aggregate <- NULL
     }
   })
@@ -570,12 +574,7 @@ PipelinePeptide_Aggregation_server <- function(id,
     paramshistory(rv.custom$temp.aggregate[[length(rv.custom$temp.aggregate)]]) <- rv.custom$history
-    browser()
-    # new.dataset <- rv$temp.aggregate$obj.prot
-    #
-    # new.name <- paste0("Aggregated", ".", TypeOfDataset(new.dataset))
-    #
-    #
+    rv$dataIn <- rv.custom$temp.aggregate
     # DO NOT MODIFY THE THREE FOLLOWING LINES
     dataOut$trigger <- MagellanNTK::Timestamp()
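
For reference, a minimal sketch of the xlsx export path after this change, outside the Shiny module. It assumes only the two DaparToolshed calls used in the diff (CleanRowData() and write.excel()); the wrapper name export_xlsx and the obj argument are illustrative and not part of the patch (in the module, obj corresponds to the value of the dataIn() reactive).

    # Minimal sketch, assuming DaparToolshed is installed and 'obj' is the
    # dataset object that the module normally obtains from dataIn().
    export_xlsx <- function(obj) {
      tryCatch({
        out.xlsx <- tempfile(fileext = ".xlsx")
        # Clean the row data first, then write the cleaned object to xlsx,
        # mirroring the patched branch of rv$export_file_xlsx
        obj.cleaned <- DaparToolshed::CleanRowData(obj)
        DaparToolshed::write.excel(obj = obj.cleaned, filename = out.xlsx)
        out.xlsx                  # return the temp file path on success
      },
      error = function(e) e       # return the error condition on failure
      )
    }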