From f10d8aa5d34da93b4ef9ded42a2a6c589f344570 Mon Sep 17 00:00:00 2001 From: Jonathan Leitschuh Date: Fri, 18 Nov 2022 22:54:51 +0000 Subject: [PATCH] vuln-fix: Temporary File Information Disclosure This fixes a temporary file information disclosure vulnerability due to the use of the vulnerable `File.createTempFile()` method. The vulnerability is fixed by using the `Files.createTempFile()` method which sets the correct POSIX permissions. Weakness: CWE-377: Insecure Temporary File Severity: Medium CVSS: 5.5 Detection: CodeQL & OpenRewrite (https://public.moderne.io/recipes/org.openrewrite.java.security.SecureTempFileCreation) Reported-by: Jonathan Leitschuh Signed-off-by: Jonathan Leitschuh Bug-tracker: https://github.com/JLLeitschuh/security-research/issues/18 Co-authored-by: Moderne --- .../java/org/seqdoop/hadoop_bam/BAMTestUtil.java | 5 +++-- .../org/seqdoop/hadoop_bam/TestBAMOutputFormat.java | 12 ++++++------ .../org/seqdoop/hadoop_bam/TestCRAMOutputFormat.java | 8 ++++---- .../org/seqdoop/hadoop_bam/TestFastqInputFormat.java | 5 +++-- .../org/seqdoop/hadoop_bam/TestQseqInputFormat.java | 5 +++-- .../org/seqdoop/hadoop_bam/TestVCFOutputFormat.java | 3 ++- .../org/seqdoop/hadoop_bam/TestVCFRoundTrip.java | 6 +++--- 7 files changed, 24 insertions(+), 20 deletions(-) diff --git a/src/test/java/org/seqdoop/hadoop_bam/BAMTestUtil.java b/src/test/java/org/seqdoop/hadoop_bam/BAMTestUtil.java index dda53a1..27909e4 100644 --- a/src/test/java/org/seqdoop/hadoop_bam/BAMTestUtil.java +++ b/src/test/java/org/seqdoop/hadoop_bam/BAMTestUtil.java @@ -11,6 +11,7 @@ import htsjdk.samtools.SamReaderFactory; import java.io.File; import java.io.IOException; +import java.nio.file.Files; class BAMTestUtil { public static File writeBamFile(int numPairs, SAMFileHeader.SortOrder sortOrder) @@ -42,7 +43,7 @@ public static File writeBamFile(int numPairs, SAMFileHeader.SortOrder sortOrder) ("test-read-%03d-unplaced-unmapped", numPairs++)); } - final File bamFile = 
File.createTempFile("test", ".bam"); + final File bamFile = Files.createTempFile("test", ".bam").toFile(); bamFile.deleteOnExit(); SAMFileHeader samHeader = samRecordSetBuilder.getHeader(); final SAMFileWriter bamWriter = new SAMFileWriterFactory() @@ -75,7 +76,7 @@ public static File writeBamFileWithLargeHeader() throws IOException { start2); } - final File bamFile = File.createTempFile("test", ".bam"); + final File bamFile = Files.createTempFile("test", ".bam").toFile(); bamFile.deleteOnExit(); SAMFileHeader samHeader = samRecordSetBuilder.getHeader(); StringBuffer sb = new StringBuffer(); diff --git a/src/test/java/org/seqdoop/hadoop_bam/TestBAMOutputFormat.java b/src/test/java/org/seqdoop/hadoop_bam/TestBAMOutputFormat.java index 357cec2..5ccde9f 100644 --- a/src/test/java/org/seqdoop/hadoop_bam/TestBAMOutputFormat.java +++ b/src/test/java/org/seqdoop/hadoop_bam/TestBAMOutputFormat.java @@ -91,7 +91,7 @@ public void setup() throws Exception { @Test public void testBAMRecordWriterNoHeader() throws Exception { - final File outFile = File.createTempFile("testBAMWriter", ".bam"); + final File outFile = Files.createTempFile("testBAMWriter", ".bam").toFile(); outFile.deleteOnExit(); final Path outPath = new Path(outFile.toURI()); @@ -120,7 +120,7 @@ public void testBAMRecordWriterNoHeader() throws Exception { @Test public void testBAMRecordWriterWithHeader() throws Exception { - final File outFile = File.createTempFile("testBAMWriter", ".bam"); + final File outFile = Files.createTempFile("testBAMWriter", ".bam").toFile(); outFile.deleteOnExit(); final Path outPath = new Path(outFile.toURI()); @@ -150,7 +150,7 @@ public void testBAMRecordWriterWithHeader() throws Exception { @Test public void testBAMOutput() throws Exception { final Path outputPath = doMapReduce(testBAMFileName); - final File outFile = File.createTempFile("testBAMWriter", ".bam"); + final File outFile = Files.createTempFile("testBAMWriter", ".bam").toFile(); outFile.deleteOnExit(); 
SAMFileMerger.mergeParts(outputPath.toUri().toString(), outFile.toURI().toString(), SAMFormat.BAM, samFileHeader); @@ -164,7 +164,7 @@ public void testEmptyBAM() throws Exception { SAMFileHeader.SortOrder.coordinate).toURI().toString(); conf.setBoolean(BAMOutputFormat.WRITE_SPLITTING_BAI, true); final Path outputPath = doMapReduce(bam); - final File outFile = File.createTempFile("testBAMWriter", ".bam"); + final File outFile = Files.createTempFile("testBAMWriter", ".bam").toFile(); outFile.deleteOnExit(); SAMFileMerger.mergeParts(outputPath.toUri().toString(), outFile.toURI().toString(), SAMFormat.BAM, new SAMRecordSetBuilder(true, SAMFileHeader.SortOrder.coordinate).getHeader()); @@ -194,7 +194,7 @@ public void testBAMWithSplittingBai() throws Exception { recordsAtSplits.addAll(getRecordsAtSplits(bamFile, index)); } - final File outFile = File.createTempFile("testBAMWriter", ".bam"); + final File outFile = Files.createTempFile("testBAMWriter", ".bam").toFile(); //outFile.deleteOnExit(); SAMFileMerger.mergeParts(outputPath.toUri().toString(), outFile.toURI().toString(), SAMFormat.BAM, @@ -231,7 +231,7 @@ public void testBAMRoundTrip() throws Exception { Path outputPath = doMapReduce(testBAMFileName); // merge the parts, and write to a temp file - final File outFile = File.createTempFile("testBAMWriter", ".bam"); + final File outFile = Files.createTempFile("testBAMWriter", ".bam").toFile(); outFile.deleteOnExit(); SAMFileMerger.mergeParts(outputPath.toUri().toString(), outFile.toURI().toString(), SAMFormat.BAM, samFileHeader); diff --git a/src/test/java/org/seqdoop/hadoop_bam/TestCRAMOutputFormat.java b/src/test/java/org/seqdoop/hadoop_bam/TestCRAMOutputFormat.java index 171013b..8e7ce1e 100644 --- a/src/test/java/org/seqdoop/hadoop_bam/TestCRAMOutputFormat.java +++ b/src/test/java/org/seqdoop/hadoop_bam/TestCRAMOutputFormat.java @@ -95,7 +95,7 @@ public void setup() throws Exception { @Test public void testCRAMRecordWriterNoHeader() throws Exception { - final File 
outFile = File.createTempFile("testCRAMWriter", ".cram"); + final File outFile = Files.createTempFile("testCRAMWriter", ".cram").toFile(); outFile.deleteOnExit(); final Path outPath = new Path(outFile.toURI()); @@ -125,7 +125,7 @@ public void testCRAMRecordWriterNoHeader() throws Exception { @Test public void testCRAMRecordWriterWithHeader() throws Exception { - final File outFile = File.createTempFile("testCRAMWriter", ".cram"); + final File outFile = Files.createTempFile("testCRAMWriter", ".cram").toFile(); outFile.deleteOnExit(); final Path outPath = new Path(outFile.toURI()); @@ -155,7 +155,7 @@ public void testCRAMRecordWriterWithHeader() throws Exception { @Test public void testCRAMOutput() throws Exception { final Path outputPath = doMapReduce(testCRAMFileName); - final File outFile = File.createTempFile("testCRAMWriter", ".cram"); + final File outFile = Files.createTempFile("testCRAMWriter", ".cram").toFile(); outFile.deleteOnExit(); SAMFileMerger.mergeParts(outputPath.toUri().toString(), outFile.toURI().toString(), SAMFormat.CRAM, samFileHeader); @@ -171,7 +171,7 @@ public void testCRAMRoundTrip() throws Exception { Path outputPath = doMapReduce(testCRAMFileName); // merge the parts, and write to a temp file - final File outFile = File.createTempFile("testCRAMWriter", ".cram"); + final File outFile = Files.createTempFile("testCRAMWriter", ".cram").toFile(); outFile.deleteOnExit(); SAMFileMerger.mergeParts(outputPath.toUri().toString(), outFile.toURI().toString(), SAMFormat.CRAM, samFileHeader); diff --git a/src/test/java/org/seqdoop/hadoop_bam/TestFastqInputFormat.java b/src/test/java/org/seqdoop/hadoop_bam/TestFastqInputFormat.java index 2574495..eb9cd9a 100644 --- a/src/test/java/org/seqdoop/hadoop_bam/TestFastqInputFormat.java +++ b/src/test/java/org/seqdoop/hadoop_bam/TestFastqInputFormat.java @@ -27,6 +27,7 @@ import java.io.File; import java.io.IOException; import java.io.PrintWriter; +import java.nio.file.Files; import java.io.BufferedWriter; import 
java.io.FileWriter; import java.io.FileOutputStream; @@ -137,8 +138,8 @@ public class TestFastqInputFormat @Before public void setup() throws IOException { - tempFastq = File.createTempFile("test_fastq_input_format", "fastq"); - tempGz = File.createTempFile("test_fastq_input_format", ".gz"); + tempFastq = Files.createTempFile("test_fastq_input_format", "fastq").toFile(); + tempGz = Files.createTempFile("test_fastq_input_format", ".gz").toFile(); conf = new JobConf(); key = new Text(); fragment = new SequencedFragment(); diff --git a/src/test/java/org/seqdoop/hadoop_bam/TestQseqInputFormat.java b/src/test/java/org/seqdoop/hadoop_bam/TestQseqInputFormat.java index 359a1e2..a96f600 100644 --- a/src/test/java/org/seqdoop/hadoop_bam/TestQseqInputFormat.java +++ b/src/test/java/org/seqdoop/hadoop_bam/TestQseqInputFormat.java @@ -29,6 +29,7 @@ import java.io.File; import java.io.IOException; import java.io.PrintWriter; +import java.nio.file.Files; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.FileOutputStream; @@ -92,8 +93,8 @@ public class TestQseqInputFormat @Before public void setup() throws IOException { - tempQseq = File.createTempFile("test_qseq_input_format", "qseq"); - tempGz = File.createTempFile("test_qseq_input_format", ".gz"); + tempQseq = Files.createTempFile("test_qseq_input_format", "qseq").toFile(); + tempGz = Files.createTempFile("test_qseq_input_format", ".gz").toFile(); conf = new JobConf(); key = new Text(); fragment = new SequencedFragment(); diff --git a/src/test/java/org/seqdoop/hadoop_bam/TestVCFOutputFormat.java b/src/test/java/org/seqdoop/hadoop_bam/TestVCFOutputFormat.java index 4002f1a..386facf 100644 --- a/src/test/java/org/seqdoop/hadoop_bam/TestVCFOutputFormat.java +++ b/src/test/java/org/seqdoop/hadoop_bam/TestVCFOutputFormat.java @@ -22,6 +22,7 @@ import java.io.*; import java.lang.reflect.InvocationTargetException; +import java.nio.file.Files; import java.util.*; import 
htsjdk.samtools.seekablestream.SeekableFileStream; @@ -54,7 +55,7 @@ public class TestVCFOutputFormat { @Before public void setup() throws IOException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { - test_vcf_output = File.createTempFile("test_vcf_output", ""); + test_vcf_output = Files.createTempFile("test_vcf_output", "").toFile(); test_vcf_output.delete(); writable = new VariantContextWritable(); Configuration conf = new Configuration(); diff --git a/src/test/java/org/seqdoop/hadoop_bam/TestVCFRoundTrip.java b/src/test/java/org/seqdoop/hadoop_bam/TestVCFRoundTrip.java index 0f61172..973a30c 100644 --- a/src/test/java/org/seqdoop/hadoop_bam/TestVCFRoundTrip.java +++ b/src/test/java/org/seqdoop/hadoop_bam/TestVCFRoundTrip.java @@ -32,6 +32,7 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -199,8 +200,7 @@ public void testRoundTripWithMerge() throws Exception { // merge the output VCFHeader vcfHeader = VCFHeaderReader.readHeaderFrom(new SeekableFileStream(new File(testVCFFileName))); - final File outFile = File.createTempFile("testVCFWriter", - testVCFFileName.substring(testVCFFileName.lastIndexOf("."))); + final File outFile = Files.createTempFile("testVCFWriter", testVCFFileName.substring(testVCFFileName.lastIndexOf("."))).toFile(); outFile.deleteOnExit(); VCFFileMerger.mergeParts(outputPath.toUri().toString(), outFile.toURI().toString(), vcfHeader); @@ -256,7 +256,7 @@ private static VCFFileReader parseVcf(File vcf) throws IOException { File actualVcf; // work around TribbleIndexedFeatureReader not reading header from .bgz files if (vcf.getName().endsWith(".bgz")) { - actualVcf = File.createTempFile(vcf.getName(), ".gz"); + actualVcf = Files.createTempFile(vcf.getName(), ".gz").toFile(); actualVcf.deleteOnExit(); Files.copy(vcf, actualVcf); } else {