
Commit ed2098b

Jaden Peterson authored and committed
Don't write to the temporary directory in ZincRunner
1 parent 269f41e commit ed2098b
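
The file comment in this commit explains the motivation: Zinc's stock store writes the analysis to the temporary directory before copying it to the destination, and that intermediate write lands outside the sandbox directory Bazel hands a multiplex worker for the work request. A rough sketch of the avoided pattern follows; it is illustrative only, not the upstream Zinc source, and the helper name and arguments are assumptions.

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

// Illustrative sketch of the write pattern this commit removes (not the upstream code):
// serialize into java.io.tmpdir first, then move the finished file into place.
object TempDirWriteSketch {
  def writeViaTempDir(destination: File)(serialize: File => Unit): Unit = {
    // File.createTempFile resolves under java.io.tmpdir, which sits outside the sandbox
    // directory provided to the worker for the current work request.
    val tmp = File.createTempFile("analysis", ".tmp")
    serialize(tmp)
    Files.move(tmp.toPath, destination.toPath, StandardCopyOption.REPLACE_EXISTING)
  }
}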


2 files changed: +110 -3 lines changed


src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala

Lines changed: 1 addition & 3 deletions
@@ -4,7 +4,6 @@ package workers.common
 import java.io.File
 import java.nio.file.{Path, Paths}
 import sbt.internal.inc.Analysis
-import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
 import xsbti.compile.AnalysisStore
 import xsbti.compile.analysis.ReadWriteMappers

@@ -29,13 +28,12 @@ object AnalysisUtil {
       ConsistentFileAnalysisStore.text(
         analysisStoreFile,
         readWriteMappers,
-        sort = true,
       )
     } else {
       ConsistentFileAnalysisStore.binary(
         analysisStoreFile,
         readWriteMappers,
-        sort = true,
+        reproducible = true,
       )
     }
   }
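
With the upstream import removed, the unqualified ConsistentFileAnalysisStore here now resolves to the store defined in the same workers.common package (added below), and the binary store is asked for reproducible output instead of the removed sort flag. A minimal sketch of the surrounding call site follows; only the two store calls and their arguments come from the hunk above, while the enclosing method name, its parameters, and the text/binary predicate are assumptions.

package higherkindness.rules_scala
package workers.common

import java.io.File
import xsbti.compile.AnalysisStore
import xsbti.compile.analysis.ReadWriteMappers

object AnalysisUtilCallSiteSketch {
  // Hypothetical wrapper around the two calls shown in the hunk above.
  def getAnalysisStore(
    analysisStoreFile: File,
    useTextFormat: Boolean,
    readWriteMappers: ReadWriteMappers,
  ): AnalysisStore =
    if (useTextFormat) {
      // Resolves to the same-package ConsistentFileAnalysisStore, not sbt.internal.inc.consistent.
      ConsistentFileAnalysisStore.text(
        analysisStoreFile,
        readWriteMappers,
      )
    } else {
      ConsistentFileAnalysisStore.binary(
        analysisStoreFile,
        readWriteMappers,
        reproducible = true,
      )
    }
}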
src/main/scala/higherkindness/rules_scala/workers/common/ConsistentFileAnalysisStore.scala

Lines changed: 109 additions & 0 deletions
@@ -0,0 +1,109 @@
/*
 * Zinc - The incremental compiler for Scala.
 * Copyright Scala Center, Lightbend, and Mark Harrah
 *
 * Licensed under Apache License 2.0
 * SPDX-License-Identifier: Apache-2.0
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package higherkindness.rules_scala
package workers.common

import java.io.{File, FileInputStream, FileOutputStream}
import java.nio.file.Files
import java.util.Optional
import sbt.internal.inc.consistent.{ConsistentAnalysisFormat, Deserializer, ParallelGzipOutputStream, Serializer, SerializerFactory}
import sbt.io.{IO, Using}
import scala.jdk.OptionConverters.*
import scala.util.control.Exception.allCatch
import xsbti.compile.analysis.ReadWriteMappers
import xsbti.compile.{AnalysisContents, AnalysisStore => XAnalysisStore}

/**
 * This is a modified version of Zinc's [[ConsistentFileAnalysisStore]], which you can view here:
 * [[https://github.com/sbt/zinc/blob/1.10.x/internal/zinc-persist/src/main/scala/sbt/internal/inc/consistent/ConsistentFileAnalysisStore.scala]]
 *
 * The only difference is that it doesn't write the analysis store to the temporary directory before copying it to the
 * destination. Doing so constitutes a write outside the sandbox directory provided during the work request, which
 * violates the Bazel multiplex worker protocol.
 */
object ConsistentFileAnalysisStore {
  def text(
    file: File,
    mappers: ReadWriteMappers,
    reproducible: Boolean = true,
    parallelism: Int = Runtime.getRuntime.availableProcessors(),
  ): XAnalysisStore =
    new AStore(
      file,
      new ConsistentAnalysisFormat(mappers, reproducible),
      SerializerFactory.text,
      parallelism,
    )

  def binary(file: File): XAnalysisStore =
    binary(
      file,
      mappers = ReadWriteMappers.getEmptyMappers(),
      reproducible = true,
    )

  def binary(
    file: File,
    mappers: ReadWriteMappers,
  ): XAnalysisStore =
    binary(
      file,
      mappers,
      reproducible = true,
    )

  def binary(
    file: File,
    mappers: ReadWriteMappers,
    reproducible: Boolean,
    parallelism: Int = Runtime.getRuntime.availableProcessors(),
  ): XAnalysisStore =
    new AStore(
      file,
      new ConsistentAnalysisFormat(mappers, reproducible),
      SerializerFactory.binary,
      parallelism,
    )

  private final class AStore[S <: Serializer, D <: Deserializer](
    file: File,
    format: ConsistentAnalysisFormat,
    sf: SerializerFactory[S, D],
    parallelism: Int = Runtime.getRuntime.availableProcessors(),
  ) extends XAnalysisStore {

    def set(analysisContents: AnalysisContents): Unit = {
      val analysis = analysisContents.getAnalysis
      val setup = analysisContents.getMiniSetup
      if (!file.getParentFile.exists()) Files.createDirectories(file.getParentFile.toPath)
      val fout = new FileOutputStream(file)
      try {
        val gout = new ParallelGzipOutputStream(fout, parallelism)
        val ser = sf.serializerFor(gout)
        format.write(ser, analysis, setup)
        gout.close()
      } finally fout.close
    }

    def get(): Optional[AnalysisContents] = {
      import sbt.internal.inc.JavaInterfaceUtil.EnrichOption
      allCatch.opt(unsafeGet()).toJava
    }

    def unsafeGet(): AnalysisContents =
      Using.gzipInputStream(new FileInputStream(file)) { in =>
        val deser = sf.deserializerFor(in)
        val (analysis, setup) = format.read(deser)
        AnalysisContents.create(analysis, setup)
      }
  }
}
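
A hedged usage sketch of the new store, based only on the signatures above; the object name, method name, and inputs are placeholders rather than what ZincRunner actually passes.

package higherkindness.rules_scala
package workers.common

import java.io.File
import java.util.Optional
import xsbti.compile.AnalysisContents
import xsbti.compile.analysis.ReadWriteMappers

// Hypothetical round trip through the store defined in this commit.
object AnalysisStoreUsageSketch {
  def roundTrip(analysisFile: File, contents: AnalysisContents): Optional[AnalysisContents] = {
    val store = ConsistentFileAnalysisStore.binary(
      analysisFile,
      ReadWriteMappers.getEmptyMappers(),
      reproducible = true,
    )
    // set() streams the gzip-compressed analysis straight to analysisFile, creating
    // parent directories as needed, with no intermediate file in the temp directory.
    store.set(contents)
    // get() returns Optional.empty if the file is missing or unreadable.
    store.get()
  }
}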
