diff --git a/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java b/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java index db1ee0ce5f7..f5cd21e4a3d 100644 --- a/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java +++ b/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java @@ -4,7 +4,6 @@ package io.deephaven.benchmarking.impl; import io.deephaven.engine.table.Table; -import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.QueryTableTestBase; import io.deephaven.benchmarking.BenchmarkTable; import io.deephaven.benchmarking.BenchmarkTableBuilder; diff --git a/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java b/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java index ecb6da2b5a6..668447631b0 100644 --- a/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java +++ b/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java @@ -181,7 +181,7 @@ private GroovyStaticImportGenerator(final String[] imports, Collection c = Class.forName(imp); + Class c = Class.forName(imp, false, Thread.currentThread().getContextClassLoader()); log.info("Processing class: " + c); for (Method m : c.getMethods()) { diff --git a/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java b/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java index 3a716e62c99..6e5b9a15cc5 100644 --- a/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java +++ b/Generators/src/main/java/io/deephaven/plot/util/GenerateFigureImmutable.java @@ -61,7 +61,7 @@ private GenerateFigureImmutable(final boolean isInterface, final String[] import this.functionNamer = functionNamer == null ? 
JavaFunction::getMethodName : functionNamer; for (final String imp : interfaces) { - final Class c = Class.forName(imp); + final Class c = Class.forName(imp, false, Thread.currentThread().getContextClassLoader()); log.info("Processing class: " + c); for (final Method m : c.getMethods()) { @@ -892,7 +892,7 @@ private Map> commonSig final Set functionSet = new HashSet<>(); for (String iface : interfaces) { - final Class c = Class.forName(iface); + final Class c = Class.forName(iface, false, Thread.currentThread().getContextClassLoader()); log.info("Processing class: " + c); for (final java.lang.reflect.Method m : c.getMethods()) { diff --git a/Generators/src/main/java/io/deephaven/plot/util/GenerateMultiSeries.java b/Generators/src/main/java/io/deephaven/plot/util/GenerateMultiSeries.java index e5d9db6b7c9..25f3210aa99 100644 --- a/Generators/src/main/java/io/deephaven/plot/util/GenerateMultiSeries.java +++ b/Generators/src/main/java/io/deephaven/plot/util/GenerateMultiSeries.java @@ -46,9 +46,11 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio } final Set skip = new HashSet<>(); - skip.add(Class.forName("io.deephaven.plot.datasets.DataSeries").getMethod("pointSize", int.class)); - skip.add(Class.forName("io.deephaven.plot.datasets.DataSeries").getMethod("pointSize", double.class)); - skip.add(Class.forName("io.deephaven.plot.datasets.DataSeries").getMethod("pointSize", long.class)); + final Class dataSeriesClass = Class.forName("io.deephaven.plot.datasets.DataSeries", false, + Thread.currentThread().getContextClassLoader()); + skip.add(dataSeriesClass.getMethod("pointSize", int.class)); + skip.add(dataSeriesClass.getMethod("pointSize", double.class)); + skip.add(dataSeriesClass.getMethod("pointSize", long.class)); new Generator("io.deephaven.plot.datasets.multiseries.MultiSeries", "DataSeriesInternal", @@ -170,7 +172,7 @@ static class Generator { this.isTransform = isTransform; this.isSwappable = isSwappable; this.interfaces = 
interfaces; - output = Class.forName(outputClass); + output = Class.forName(outputClass, false, Thread.currentThread().getContextClassLoader()); final int mod = output.getModifiers(); isInterface = Modifier.isInterface(mod); @@ -241,7 +243,7 @@ private String generateClasses(final Set skip) throws ClassNotFoundExcep final List sortedMethods = new ArrayList<>(); final List methodsWithFunctionParameter = new ArrayList<>(); for (final String clazz : interfaces) { - final Class dataseries = Class.forName(clazz); + final Class dataseries = Class.forName(clazz, false, Thread.currentThread().getContextClassLoader()); final Method[] methods = Arrays.stream(dataseries.getMethods()) .filter(m -> !skip.contains(m)) .toArray(Method[]::new); @@ -561,7 +563,8 @@ private String getFigureFunctionInput(final String returnClass, : "getPartitionedTableHandle().getTable(), "); if (function.getMethodName().equals("pointColorByY")) { - final Class c = Class.forName("io.deephaven.plot.datasets.multiseries." + returnClass); + final Class c = Class.forName("io.deephaven.plot.datasets.multiseries." 
+ returnClass, false, + Thread.currentThread().getContextClassLoader()); final Method[] methods = Arrays.stream(c.getDeclaredMethods()) .filter(m -> m.getName().equals(tableMethodName)) .filter(m -> m.getParameterTypes().length > 0 && m.getParameterTypes()[0].equals(Table.class)) @@ -586,7 +589,8 @@ private String getFigureFunctionInput(final String returnClass, return code.append(", multiSeriesKey), this").toString(); } - final Class c = Class.forName(function.getClassName()); + final Class c = Class.forName(function.getClassName(), false, + Thread.currentThread().getContextClassLoader()); final Method[] methods = Arrays.stream(c.getMethods()) .filter(m -> m.getName().equals(tableMethodName)) .filter(m -> m.getParameterTypes().length > 0 && m.getParameterTypes()[0].equals(Table.class)) diff --git a/Generators/src/main/java/io/deephaven/plot/util/GeneratePlottingConvenience.java b/Generators/src/main/java/io/deephaven/plot/util/GeneratePlottingConvenience.java index a86bac79500..99c7feabac0 100644 --- a/Generators/src/main/java/io/deephaven/plot/util/GeneratePlottingConvenience.java +++ b/Generators/src/main/java/io/deephaven/plot/util/GeneratePlottingConvenience.java @@ -49,7 +49,7 @@ private GeneratePlottingConvenience(final String[] staticImports, final String[] final int lastDot = staticImport.lastIndexOf("."); final String classPath = staticImport.substring(0, lastDot); final String methodName = staticImport.substring(lastDot + 1); - final Class c = Class.forName(classPath); + final Class c = Class.forName(classPath, false, Thread.currentThread().getContextClassLoader()); log.info("Processing static class: " + c); final Method[] methods = Arrays.stream(c.getMethods()).filter( @@ -64,7 +64,7 @@ private GeneratePlottingConvenience(final String[] staticImports, final String[] } for (final String imp : imports) { - final Class c = Class.forName(imp); + final Class c = Class.forName(imp, false, Thread.currentThread().getContextClassLoader()); log.info("Processing 
class: " + c); for (final Method m : c.getMethods()) { diff --git a/Generators/src/main/java/io/deephaven/plot/util/GeneratePyV2FigureAPI.java b/Generators/src/main/java/io/deephaven/plot/util/GeneratePyV2FigureAPI.java index 43b7cd23ae8..f13655cd750 100644 --- a/Generators/src/main/java/io/deephaven/plot/util/GeneratePyV2FigureAPI.java +++ b/Generators/src/main/java/io/deephaven/plot/util/GeneratePyV2FigureAPI.java @@ -123,7 +123,7 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio * @throws ClassNotFoundException JCLASS is not found */ public static Map> getMethodSignatures() throws ClassNotFoundException { - final Class c = Class.forName(JCLASS); + final Class c = Class.forName(JCLASS, false, Thread.currentThread().getContextClassLoader()); final Map> signatures = new TreeMap<>(); for (final Method m : c.getMethods()) { diff --git a/Integrations/build.gradle b/Integrations/build.gradle index 58f270d42f9..bd049ef7ec3 100644 --- a/Integrations/build.gradle +++ b/Integrations/build.gradle @@ -18,6 +18,8 @@ dependencies { implementation project(':Configuration') implementation project(':log-factory') + testImplementation project(':engine-test-utils') + testRuntimeOnly project(':log-to-slf4j') // add configs, and some runtime dependencies to test classpaths testRuntimeOnly project(':configs') diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonDeephavenSession.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonDeephavenSession.java index 22b96513f28..0fdb179c6ce 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonDeephavenSession.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonDeephavenSession.java @@ -6,9 +6,10 @@ import io.deephaven.base.FileUtils; import io.deephaven.base.verify.Assert; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import 
io.deephaven.engine.exceptions.CancellationException; import io.deephaven.engine.context.QueryScope; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.AbstractScriptSession; import io.deephaven.engine.util.PythonEvaluator; import io.deephaven.engine.util.PythonEvaluatorJpy; @@ -69,6 +70,7 @@ public class PythonDeephavenSession extends AbstractScriptSession scope) { - super(NoOp.INSTANCE, null); + public PythonDeephavenSession( + final UpdateGraph updateGraph, final PythonScope scope) { + super(updateGraph, NoOp.INSTANCE, null); this.scope = (PythonScope) scope; try (final SafeCloseable ignored = executionContext.open()) { this.module = (PythonScriptSessionModule) PyModule.importModule("deephaven.server.script_session") @@ -186,9 +191,8 @@ public void popScope() { protected void evaluate(String command, String scriptName) { log.info().append("Evaluating command: " + command).endl(); try { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLockedInterruptibly(() -> { - evaluator.evalScript(command); - }); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .doLockedInterruptibly(() -> evaluator.evalScript(command)); } catch (InterruptedException e) { throw new CancellationException(e.getMessage() != null ? 
e.getMessage() : "Query interrupted", e); } diff --git a/Integrations/src/test/java/io/deephaven/integrations/learn/ComputerTest.java b/Integrations/src/test/java/io/deephaven/integrations/learn/ComputerTest.java index 06ef8dbdaf9..29839e42e31 100644 --- a/Integrations/src/test/java/io/deephaven/integrations/learn/ComputerTest.java +++ b/Integrations/src/test/java/io/deephaven/integrations/learn/ComputerTest.java @@ -8,19 +8,21 @@ import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.table.impl.InMemoryTable; import io.deephaven.engine.table.ColumnSource; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import org.junit.*; import java.util.Objects; import java.util.function.Function; public class ComputerTest { - private static InMemoryTable table; + @Rule + public final EngineCleanup framework = new EngineCleanup(); - @BeforeClass - public static void createTable() { + private InMemoryTable table; + + @Before + public void createTable() { table = new InMemoryTable( new String[] {"Column1", "Column2", "Column3"}, new Object[] { diff --git a/Integrations/src/test/java/io/deephaven/integrations/learn/FutureTest.java b/Integrations/src/test/java/io/deephaven/integrations/learn/FutureTest.java index a89e8e66a9e..d2b7eef7081 100644 --- a/Integrations/src/test/java/io/deephaven/integrations/learn/FutureTest.java +++ b/Integrations/src/test/java/io/deephaven/integrations/learn/FutureTest.java @@ -8,19 +8,21 @@ import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.InMemoryTable; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import org.junit.*; import java.util.Objects; import java.util.function.Function; public class FutureTest { - private static InMemoryTable table; + @Rule + public 
final EngineCleanup framework = new EngineCleanup(); - @BeforeClass - public static void createTable() { + private InMemoryTable table; + + @Before + public void createTable() { table = new InMemoryTable( new String[] {"Column1", "Column2", "Column3"}, new Object[] { @@ -41,7 +43,7 @@ private static Input[] createInputs(Function gatherFunc) { return createInputs(gatherFunc, gatherFunc); } - private static Future createFuture(Function modelFunc, Input[] inputs, int batchSize) { + private Future createFuture(Function modelFunc, Input[] inputs, int batchSize) { return new Future(modelFunc, inputs, new ColumnSource[][] { table.view("Column1", "Column2").getColumnSources() diff --git a/Integrations/src/test/java/io/deephaven/integrations/learn/InputTest.java b/Integrations/src/test/java/io/deephaven/integrations/learn/InputTest.java index c86cb4aa297..045bb5d048f 100644 --- a/Integrations/src/test/java/io/deephaven/integrations/learn/InputTest.java +++ b/Integrations/src/test/java/io/deephaven/integrations/learn/InputTest.java @@ -6,19 +6,21 @@ import io.deephaven.engine.table.impl.InMemoryTable; import io.deephaven.engine.table.ColumnSource; import io.deephaven.api.util.NameValidator; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import org.junit.*; import java.util.Arrays; import java.util.function.Function; public class InputTest { - private static InMemoryTable table; + @Rule + public final EngineCleanup framework = new EngineCleanup(); - @BeforeClass - public static void createTable() { + private InMemoryTable table; + + @Before + public void createTable() { table = new InMemoryTable( new String[] {"Column1", "Column2", "Column3"}, new Object[] { diff --git a/Integrations/src/test/java/io/deephaven/integrations/learn/ScattererTest.java b/Integrations/src/test/java/io/deephaven/integrations/learn/ScattererTest.java index 51237535fba..5b584aa100e 100644 --- 
a/Integrations/src/test/java/io/deephaven/integrations/learn/ScattererTest.java +++ b/Integrations/src/test/java/io/deephaven/integrations/learn/ScattererTest.java @@ -4,19 +4,20 @@ package io.deephaven.integrations.learn; import io.deephaven.engine.table.impl.InMemoryTable; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import org.junit.*; import java.util.function.Function; public class ScattererTest { - private static InMemoryTable table; + @Rule + public final EngineCleanup framework = new EngineCleanup(); + private InMemoryTable table; - @BeforeClass - public static void setup() { + @Before + public void setup() { table = new InMemoryTable( new String[] {"Column1", "Column2", "Column3"}, new Object[] { diff --git a/Integrations/src/test/java/io/deephaven/integrations/learn/gather/NumPyTest.java b/Integrations/src/test/java/io/deephaven/integrations/learn/gather/NumPyTest.java index 8a6f7382729..18bee7e40f8 100644 --- a/Integrations/src/test/java/io/deephaven/integrations/learn/gather/NumPyTest.java +++ b/Integrations/src/test/java/io/deephaven/integrations/learn/gather/NumPyTest.java @@ -6,15 +6,17 @@ import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.InMemoryTable; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import org.junit.*; + import java.util.function.BiFunction; import java.util.function.Function; public class NumPyTest { - private static InMemoryTable table; + @Rule + public final EngineCleanup framework = new EngineCleanup(); + private static final String[] boolColNames = {"bool1", "bool2"}; private static final boolean[][] boolData = { new boolean[] {true, true, false, false}, @@ -69,12 +71,14 @@ public class NumPyTest { doubleData[0], doubleData[1] }; - @BeforeClass - public static 
void setup() { + private InMemoryTable table; + + @Before + public void setup() { table = new InMemoryTable(columnNames, columnData); } - public static ColumnSource[] getColSet(final String[] colNames) { + public ColumnSource[] getColSet(final String[] colNames) { ColumnSource[] rst = new ColumnSource[2]; for (int i = 0; i < 2; i++) { diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java index b3e10500757..e4f15d81457 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java @@ -5,10 +5,11 @@ import io.deephaven.base.verify.Assert; import io.deephaven.base.verify.Require; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.exceptions.CancellationException; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.table.impl.NotificationStepSource; import io.deephaven.engine.table.impl.remote.ConstructSnapshot; import io.deephaven.internal.log.LoggerFactory; @@ -66,17 +67,17 @@ interface QueryDataRetrievalOperation { */ public enum GetDataLockType { /** - * The UGP lock is already held. + * The UpdateGraph lock is already held. */ - UGP_LOCK_ALREADY_HELD, + UPDATE_GRAPH_LOCK_ALREADY_HELD, /** - * Acquire the UGP lock. + * Acquire the UpdateGraph's exclusive lock. */ - UGP_LOCK, + UPDATE_GRAPH_EXCLUSIVE_LOCK, /** - * Acquire an UGP read lock. + * Acquire the UpdateGraph's shared lock. */ - UGP_READ_LOCK, + UPDATE_GRAPH_SHARED_LOCK, /** * Use the (usually) lock-free snapshotting mechanism. 
*/ @@ -199,10 +200,10 @@ interface MostRecentDataGetter { /** * Returns a {@code ThrowingConsumer} that takes a {@link QueryDataRetrievalOperation}, acquires a - * {@link UpdateGraphProcessor} lock based on the specified {@code lockType}, then executes the + * {@link PeriodicUpdateGraph} lock based on the specified {@code lockType}, then executes the * {@code FitDataPopulator} with the appropriate value for usePrev. * - * @param lockType The way of acquiring the {@code UpdateGraphProcessor} lock. + * @param lockType The way of acquiring the {@code PeriodicUpdateGraph} lock. * @return A function that runs an operation which accepts a {@link QueryDataRetrievalOperation} and a * {@link Table}. */ @@ -210,14 +211,18 @@ interface MostRecentDataGetter { protected static ThrowingBiConsumer getDoLockedConsumer( final GetDataLockType lockType) { switch (lockType) { - case UGP_LOCK_ALREADY_HELD: + case UPDATE_GRAPH_LOCK_ALREADY_HELD: return (queryDataRetrievalOperation, source) -> queryDataRetrievalOperation.retrieveData(false); - case UGP_LOCK: - return (queryDataRetrievalOperation, source) -> UpdateGraphProcessor.DEFAULT.exclusiveLock() - .doLocked(() -> queryDataRetrievalOperation.retrieveData(false)); - case UGP_READ_LOCK: - return (queryDataRetrievalOperation, source) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .doLocked(() -> queryDataRetrievalOperation.retrieveData(false)); + case UPDATE_GRAPH_EXCLUSIVE_LOCK: + return (queryDataRetrievalOperation, source) -> { + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked( + () -> queryDataRetrievalOperation.retrieveData(false)); + }; + case UPDATE_GRAPH_SHARED_LOCK: + return (queryDataRetrievalOperation, source) -> { + ExecutionContext.getContext().getUpdateGraph().sharedLock().doLocked( + () -> queryDataRetrievalOperation.retrieveData(false)); + }; case SNAPSHOT: return (queryDataRetrievalOperation, source) -> { try { diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmOnDemand.java 
b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmOnDemand.java index 15d052dcf15..5ee40931ef7 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmOnDemand.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmOnDemand.java @@ -6,7 +6,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.configuration.Configuration; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.rowset.RowSet; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; @@ -18,7 +18,7 @@ /** * A ModelFarm implementation for evaluating a model upon request, retrieving a snapshot of data for all keys under a - * single {@link UpdateGraphProcessor} lock. + * single {@link PeriodicUpdateGraph} lock. * * @param The type of keys. * @param The type of data. @@ -31,7 +31,7 @@ public class ModelFarmOnDemand DO_LOCKED_FUNCTION = - getDoLockedConsumer(GetDataLockType.UGP_READ_LOCK); + getDoLockedConsumer(GetDataLockType.UPDATE_GRAPH_SHARED_LOCK); private static class QueueAndCallback { private final Queue queue; @@ -65,7 +65,7 @@ public ModelFarmOnDemand(int nThreads, Model model) { /** * Submit a request to {@link Model#exec execute} the {@link #model}. Can be called either with or without a - * UpdateGraphProcessor lock -- the decision of whether/how to acquire a lock is left to the + * PeriodicUpdateGraph lock -- the decision of whether/how to acquire a lock is left to the * {@link #DO_LOCKED_FUNCTION}. All keys represented by the data in the {@code dataManager} will be processed. * * @param dataManager The {@code RowDataManager} that will provide data for the pricing requests. @@ -79,7 +79,7 @@ public void requestUpdate(ROWDATAMANAGERTYPE dataManager, Runnable callback) { /** * Submit a request to {@link Model#exec execute} the {@link #model}. 
Can be called either with or without a - * UpdateGraphProcessor lock -- the decision of whether/how to acquire a lock is left to the + * PeriodicUpdateGraph lock -- the decision of whether/how to acquire a lock is left to the * {@link #DO_LOCKED_FUNCTION}. * * @param dataManager The {@code RowDataManager} that will provide data for the pricing requests. diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java index b6b58410068..d59990d4278 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java @@ -65,7 +65,7 @@ public ModelFarmTick(final int nThreads, final Model model, final ROWD super(nThreads, model, dataManager); this.maxQueueSize = maxQueueSize; this.queue = new ArrayDeque<>(this.maxQueueSize); - this.mostRecentDataGetter = getMostRecentDataFactory(GetDataLockType.UGP_LOCK_ALREADY_HELD); + this.mostRecentDataGetter = getMostRecentDataFactory(GetDataLockType.UPDATE_GRAPH_LOCK_ALREADY_HELD); } @Override diff --git a/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java b/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java index 5b89b4d2e2a..9b013ad84d1 100644 --- a/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java +++ b/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java @@ -5,16 +5,12 @@ import io.deephaven.base.verify.Require; import io.deephaven.configuration.Configuration; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.util.SafeCloseable; -import junit.framework.TestCase; import org.apache.commons.lang3.mutable.MutableLong; import org.jetbrains.annotations.NotNull; -import org.junit.After; 
-import org.junit.Before; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -22,28 +18,12 @@ import static io.deephaven.util.QueryConstants.NULL_LONG; -public class TestModelFarm extends TestCase { +public class TestModelFarm extends RefreshingTableTestCase { private final long testShutdownTimeoutSecs = Configuration.getInstance().getIntegerWithDefault("TestModelFarm.testShutdownTimeoutSecs", 1); private final int nModelFarmThreadsDefault = 8; - private SafeCloseable executionContext; - - @Before - @Override - public void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @After - @Override - public void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } - /** * Ensure that the ModelFarm terminates immediately if it is shut down while not busy with an empty queue. */ diff --git a/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java b/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java index 840d3d8a0a1..7e7a6cd6b6e 100644 --- a/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java +++ b/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java @@ -3,36 +3,16 @@ */ package io.deephaven.modelfarm.util; -import io.deephaven.base.testing.BaseArrayTestCase; import io.deephaven.base.verify.RequirementFailure; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.time.DateTimeUtils; -import io.deephaven.util.SafeCloseable; import io.deephaven.vector.*; import io.deephaven.engine.util.TableTools; -import org.junit.After; -import org.junit.Before; import java.time.Instant; -public class TestModelFarmUtils extends BaseArrayTestCase { - - private SafeCloseable executionContext; - - @Before - @Override - public void setUp() 
throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @After - @Override - public void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } +public class TestModelFarmUtils extends RefreshingTableTestCase { public void testRequireTable() { final Table t = TableTools.emptyTable(5).updateView("A=(int)i", "B=(long)i", "C=(double)i"); diff --git a/Plot/src/test/java/io/deephaven/plot/Filters/TestSelectables.java b/Plot/src/test/java/io/deephaven/plot/Filters/TestSelectables.java index 4da312e27a1..06b49230677 100644 --- a/Plot/src/test/java/io/deephaven/plot/Filters/TestSelectables.java +++ b/Plot/src/test/java/io/deephaven/plot/Filters/TestSelectables.java @@ -3,8 +3,8 @@ */ package io.deephaven.plot.Filters; -import io.deephaven.base.testing.BaseArrayTestCase; import io.deephaven.base.verify.RequirementFailure; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.plot.BaseFigureImpl; import io.deephaven.plot.filters.SelectableDataSet; import io.deephaven.plot.filters.SelectableDataSetOneClick; @@ -13,30 +13,44 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.PartitionedTable; +import junit.framework.TestCase; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; import java.util.*; -public class TestSelectables extends BaseArrayTestCase { +public class TestSelectables { + + @Rule + final public EngineCleanup framework = new EngineCleanup(); + private final String[] categories = {"A", "B", "C"}; private final double[] values = {1, 2, 3}; private final String byColumn = "Cats"; private final String valueColumn = "Values"; - private final Table table = - TableTools.newTable(TableTools.col(byColumn, categories), TableTools.doubleCol(valueColumn, values)); private final BaseFigureImpl figure = new BaseFigureImpl(); + private Table table; + + @Before + public void 
setUp() { + table = TableTools.newTable(TableTools.col(byColumn, categories), TableTools.doubleCol(valueColumn, values)); + } + + @Test public void testFilteredTableOneClick() { try { Selectables.oneClick((Table) null, byColumn); - fail("Expected an exception"); + TestCase.fail("Expected an exception"); } catch (RequirementFailure e) { - assertTrue(e.getMessage().contains("null")); + TestCase.assertTrue(e.getMessage().contains("null")); } try { Selectables.oneClick((Table) null); - fail("Expected an exception"); + TestCase.fail("Expected an exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("empty")); + TestCase.assertTrue(e.getMessage().contains("empty")); } testFilteredTable(Selectables.oneClick(table, byColumn)); testFilteredTable(new SelectableDataSetOneClick(table.partitionBy(byColumn))); @@ -56,10 +70,10 @@ private void testPartitionedTableEquals(final PartitionedTable t1, final Partiti } private void testTableEquals(final Table t1, final Table t2) { - assertNotNull(t1); - assertNotNull(t2); + TestCase.assertNotNull(t1); + TestCase.assertNotNull(t2); final List columnNames = t1.getDefinition().getColumnNames(); - assertEquals(columnNames.size(), t2.numColumns()); + TestCase.assertEquals(columnNames.size(), t2.numColumns()); t2.hasColumns(columnNames); } } diff --git a/Plot/src/test/java/io/deephaven/plot/TestBaseFigureImpl.java b/Plot/src/test/java/io/deephaven/plot/TestBaseFigureImpl.java index 0ba8ebd6b67..55398c98c8c 100644 --- a/Plot/src/test/java/io/deephaven/plot/TestBaseFigureImpl.java +++ b/Plot/src/test/java/io/deephaven/plot/TestBaseFigureImpl.java @@ -3,24 +3,11 @@ */ package io.deephaven.plot; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.gui.color.Color; import junit.framework.TestCase; -public class TestBaseFigureImpl extends BaseArrayTestCase { - 
@Override - public void setUp() throws Exception { - super.setUp(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } +public class TestBaseFigureImpl extends RefreshingTableTestCase { public void testSubplot() { BaseFigureImpl fig1 = new BaseFigureImpl(); diff --git a/Plot/src/test/java/io/deephaven/plot/TestSeriesLocation.java b/Plot/src/test/java/io/deephaven/plot/TestSeriesLocation.java index 08814c61197..2ed4de4b757 100644 --- a/Plot/src/test/java/io/deephaven/plot/TestSeriesLocation.java +++ b/Plot/src/test/java/io/deephaven/plot/TestSeriesLocation.java @@ -3,32 +3,16 @@ */ package io.deephaven.plot; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.plot.datasets.multiseries.MultiXYSeries; import io.deephaven.plot.datasets.xy.XYDataSeriesArray; -import io.deephaven.util.SafeCloseable; /** * Test SeriesLocation. 
*/ -public class TestSeriesLocation extends BaseArrayTestCase { - - private SafeCloseable executionContext; - - @Override - public void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } +public class TestSeriesLocation extends RefreshingTableTestCase { public void testLocation() { final Table t = TableTools.emptyTable(100).update("A=i%2==0?`A`:`B`", "X=1.0*i", "Y=1.0*i*i"); diff --git a/Plot/src/test/java/io/deephaven/plot/composite/TestScatterPlotMatrix.java b/Plot/src/test/java/io/deephaven/plot/composite/TestScatterPlotMatrix.java index 94bfed908af..ff5af38f762 100644 --- a/Plot/src/test/java/io/deephaven/plot/composite/TestScatterPlotMatrix.java +++ b/Plot/src/test/java/io/deephaven/plot/composite/TestScatterPlotMatrix.java @@ -3,17 +3,16 @@ */ package io.deephaven.plot.composite; -import io.deephaven.base.testing.BaseArrayTestCase; import io.deephaven.base.verify.RequirementFailure; -import io.deephaven.engine.context.TestExecutionContext; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.util.ColumnHolder; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.plot.FigureImpl; import io.deephaven.plot.datasets.xy.XYDataSeriesInternal; import io.deephaven.plot.filters.SelectableDataSetOneClick; -import io.deephaven.util.SafeCloseable; import junit.framework.TestCase; import java.util.concurrent.atomic.AtomicInteger; @@ -21,29 +20,12 @@ import static io.deephaven.util.QueryConstants.NULL_INT; import static io.deephaven.util.QueryConstants.NULL_LONG; -public class TestScatterPlotMatrix extends 
BaseArrayTestCase { +public class TestScatterPlotMatrix extends RefreshingTableTestCase { private final int length = 10; - private SafeCloseable executionContext; - - @Override - public void setUp() throws Exception { - super.setUp(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - executionContext.close(); - } - public void testScatterPlotMatrix() { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.startCycleForUnitTests(); final int[][] ints = new int[length][length]; final double[][] doubles = new double[length][length]; final long[][] longs = new long[length][length]; @@ -119,7 +101,7 @@ public void testScatterPlotMatrix() { } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("dimension")); } - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); } public void testPointSize() { diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesMap.java b/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesMap.java index c1c69c06d03..746746f8ef0 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesMap.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesMap.java @@ -3,14 +3,24 @@ */ package io.deephaven.plot.datasets.category; -import io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.plot.AxesImpl; import io.deephaven.plot.BaseFigureImpl; import 
io.deephaven.plot.datasets.data.*; import io.deephaven.engine.util.TableTools; import junit.framework.TestCase; +import org.junit.Rule; +import org.junit.Test; + +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertNull; +import static junit.framework.TestCase.assertTrue; + +public class TestCategoryDataSeriesMap { + + @Rule + final public EngineCleanup framework = new EngineCleanup(); -public class TestCategoryDataSeriesMap extends BaseArrayTestCase { private static final int SIZE = 500; private static final BaseFigureImpl f = new BaseFigureImpl(); private static final AxesImpl a = f.newChart().newAxes(); @@ -37,6 +47,7 @@ public class TestCategoryDataSeriesMap extends BaseArrayTestCase { private static final IndexableDataDouble dxMismatched = new IndexableDataDouble(dataXMisMatched, false, null); private static final IndexableNumericData dy = new IndexableNumericDataArrayDouble(dataY, null); + @Test public void testCategoryDataSeriesMap() { CategoryDataSeriesMap map = new CategoryDataSeriesMap(a, 1, "Test", dx, dy); @@ -80,6 +91,7 @@ public void testCategoryDataSeriesMap() { } } + @Test public void testCopy() { final CategoryDataSeriesMap map1 = new CategoryDataSeriesMap(a, 1, "Test", dx, dy); final CategoryDataSeriesMap map1Copy = map1.copy(new BaseFigureImpl().newChart().newAxes()); diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesPartitionedTable.java b/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesPartitionedTable.java index ea370182736..53532b669ff 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesPartitionedTable.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesPartitionedTable.java @@ -3,15 +3,24 @@ */ package io.deephaven.plot.datasets.category; -import io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.testutil.junit4.EngineCleanup; 
import io.deephaven.plot.*; import io.deephaven.plot.util.PlotUtils; import io.deephaven.plot.util.tables.TableHandle; import io.deephaven.engine.table.Table; import io.deephaven.engine.util.TableTools; +import org.junit.Rule; +import org.junit.Test; -public class TestCategoryDataSeriesPartitionedTable extends BaseArrayTestCase { +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertNull; +public class TestCategoryDataSeriesPartitionedTable { + + @Rule + final public EngineCleanup framework = new EngineCleanup(); + + @Test public void testCopy() { final BaseFigureImpl figure = new BaseFigureImpl(); final ChartImpl chart = figure.newChart(); diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesSwappablePartitionedTable.java b/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesSwappablePartitionedTable.java index cde5d07aa9c..06880855d65 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesSwappablePartitionedTable.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/category/TestCategoryDataSeriesSwappablePartitionedTable.java @@ -3,30 +3,12 @@ */ package io.deephaven.plot.datasets.category; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.plot.BaseFigureImpl; -import io.deephaven.util.SafeCloseable; -public class TestCategoryDataSeriesSwappablePartitionedTable extends BaseArrayTestCase { - - private SafeCloseable executionContext; - - @Override - public void setUp() throws Exception { - super.setUp(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - executionContext = TestExecutionContext.createForUnitTests().open(); - 
} - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } +public class TestCategoryDataSeriesSwappablePartitionedTable extends RefreshingTableTestCase { public void testCategoryDataSeriesPartitionedTable() { final BaseFigureImpl figure = new BaseFigureImpl(); diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/data/TestAssociativeData.java b/Plot/src/test/java/io/deephaven/plot/datasets/data/TestAssociativeData.java index 454c692a9d5..7a84be8f019 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/data/TestAssociativeData.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/data/TestAssociativeData.java @@ -3,33 +3,34 @@ */ package io.deephaven.plot.datasets.data; -import io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.plot.errors.PlotIllegalArgumentException; import io.deephaven.plot.util.tables.TableHandle; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import junit.framework.TestCase; import java.util.HashMap; import java.util.Map; -public class TestAssociativeData extends BaseArrayTestCase { +public class TestAssociativeData extends RefreshingTableTestCase { private final String[] cats = {"A", "B"}; private final int[] values = {1, 2}; private final Integer x = values[0]; - private final Table t = TableTools.newTable(TableTools.col("Cat", cats), TableTools.intCol("Values", values)); - private final TableHandle tableHandle = new TableHandle(t, "Cat", "Values"); - private final AssociativeDataTable associativeDataTable = - new AssociativeDataTable<>(tableHandle, "Cat", "Values", String.class, Integer.class, null); + private final AssociativeDataHashMap dataHashMap = new AssociativeDataHashMap<>(null); + private Table t; + private TableHandle tableHandle; + private AssociativeDataTable 
associativeDataTable; + @Override public void setUp() throws Exception { super.setUp(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - + t = TableTools.newTable(TableTools.col("Cat", cats), TableTools.intCol("Values", values)); + tableHandle = new TableHandle(t, "Cat", "Values"); + associativeDataTable = + new AssociativeDataTable<>(tableHandle, "Cat", "Values", String.class, Integer.class, null); // prime the listeners associativeDataTable.get(null); } @@ -37,7 +38,6 @@ public void setUp() throws Exception { @Override public void tearDown() throws Exception { super.tearDown(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); } public void testAssociativeDataHashMap() { diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/data/TestIndexableData.java b/Plot/src/test/java/io/deephaven/plot/datasets/data/TestIndexableData.java index d8bf2ea1862..f4a3cda36cd 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/data/TestIndexableData.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/data/TestIndexableData.java @@ -3,12 +3,15 @@ */ package io.deephaven.plot.datasets.data; -import io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.plot.BaseFigureImpl; import io.deephaven.plot.util.tables.*; import io.deephaven.engine.table.Table; import io.deephaven.engine.util.TableTools; import io.deephaven.time.DateTimeUtils; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; import java.time.Instant; import java.util.ArrayList; @@ -17,8 +20,13 @@ import java.util.List; import static io.deephaven.util.QueryConstants.*; +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertNull; -public class TestIndexableData extends BaseArrayTestCase { +public class TestIndexableData { + + @Rule + final public EngineCleanup framework = new EngineCleanup(); private final 
int SIZE = 200; private final short[] shortArray = new short[SIZE]; @@ -31,8 +39,8 @@ public class TestIndexableData extends BaseArrayTestCase { private final Date[] dateArray = new Date[SIZE]; private final Instant[] instantsArray = new Instant[SIZE]; - @Override - public void setUp() throws Exception { + @Before + public void setUp() { short i = 0; shortArray[i] = NULL_SHORT; intArray[i] = NULL_INT; @@ -57,6 +65,7 @@ public void setUp() throws Exception { } } + @Test public void testIndexableNumericData() { final IndexableNumericData shortData = new IndexableNumericDataArrayShort(shortArray, null); final IndexableNumericData intData = new IndexableNumericDataArrayInt(intArray, null); @@ -71,6 +80,7 @@ public void testIndexableNumericData() { checkDateData(dateData); } + @Test public void testIndexableDouble() { IndexableData shortData = new IndexableDataDouble(shortArray, false, null); IndexableData intData = new IndexableDataDouble(intArray, false, null); @@ -89,11 +99,13 @@ public void testIndexableDouble() { checkData(null, true, shortData, intData, doubleData, longData, floatData, numberData); } + @Test public void testIndexableInteger() { final IndexableData intData = new IndexableDataInteger(intArray, null); checkData(null, true, intData); } + @Test public void testIndexableDataTable() { final Table t = TableTools.newTable(TableTools.shortCol("shortCol", shortArray), TableTools.intCol("intCol", intArray), TableTools.doubleCol("doubleCol", doubleArray), @@ -126,6 +138,7 @@ public void testIndexableDataTable() { checkData(null, false, shortData, intData, doubleData, floatData, longData, numberData); } + @Test public void testIndexableDataInfinite() { final IndexableDataInfinite indexableDataInfinite = new IndexableDataInfinite<>(new IndexableDataDouble(doubleArray, true, null)); @@ -139,6 +152,7 @@ public void testIndexableDataInfinite() { assertNull(indexableDataInfinite.get(doubleArray.length + 1)); } + @Test public void testIndexableDataWithDefault() { 
final IndexableDataWithDefault indexableDataWithDefault = new IndexableDataWithDefault(null); @@ -198,6 +212,7 @@ private void checkDateData(IndexableNumericData... datasets) { } } + @Test public void testDoubleStream() { final double[] data = {1, 2, 3, 4}; final double target = Arrays.stream(data).sum(); diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/CategoryErrorBarDataSeriesPartitionedTableTest.java b/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/CategoryErrorBarDataSeriesPartitionedTableTest.java index cf6f668e91e..ca0a01c3df4 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/CategoryErrorBarDataSeriesPartitionedTableTest.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/CategoryErrorBarDataSeriesPartitionedTableTest.java @@ -3,7 +3,7 @@ */ package io.deephaven.plot.datasets.errorbar; -import io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.plot.*; import io.deephaven.plot.datasets.categoryerrorbar.CategoryErrorBarDataSeriesInternal; import io.deephaven.plot.datasets.categoryerrorbar.CategoryErrorBarDataSeriesPartitionedTable; @@ -11,9 +11,18 @@ import io.deephaven.plot.util.tables.TableHandle; import io.deephaven.engine.table.Table; import io.deephaven.engine.util.TableTools; +import org.junit.Rule; +import org.junit.Test; -public class CategoryErrorBarDataSeriesPartitionedTableTest extends BaseArrayTestCase { +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertNull; +public class CategoryErrorBarDataSeriesPartitionedTableTest { + + @Rule + final public EngineCleanup framework = new EngineCleanup(); + + @Test public void testCopy() { final BaseFigureImpl figure = new BaseFigureImpl(); final ChartImpl chart = figure.newChart(); diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java 
b/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java index 586502f85d2..7cb6a98a8bc 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java @@ -3,11 +3,12 @@ */ package io.deephaven.plot.datasets.errorbar; -import io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.plot.BaseFigureImpl; import io.deephaven.plot.ChartImpl; @@ -18,20 +19,7 @@ import static io.deephaven.engine.testutil.TstUtils.*; import static io.deephaven.engine.util.TableTools.col; -public class XYErrorBarDataSeriesTableArrayTest extends BaseArrayTestCase { - - @Override - public void setUp() throws Exception { - super.setUp(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } +public class XYErrorBarDataSeriesTableArrayTest extends RefreshingTableTestCase { public void testXYErrorBarDataSeriesTableArray() { final BaseFigureImpl figure = new BaseFigureImpl(); @@ -95,7 +83,8 @@ public void testRefreshingTable() { assertEquals(series.getX(4), Double.NaN); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { 
addToTable(refreshingTable, i(7, 9), col("x", 4, 5), col("y", 4, 5), col("yLow", 3, 4), col("yHigh", 5, 6)); refreshingTable.notifyListeners(i(7, 9), i(), i()); }); diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/ohlc/TestOHLCDataSeries.java b/Plot/src/test/java/io/deephaven/plot/datasets/ohlc/TestOHLCDataSeries.java index 789df4ce7df..ca92df32cd2 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/ohlc/TestOHLCDataSeries.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/ohlc/TestOHLCDataSeries.java @@ -3,9 +3,8 @@ */ package io.deephaven.plot.datasets.ohlc; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.gui.color.Color; import io.deephaven.plot.BaseFigureImpl; @@ -17,12 +16,11 @@ import io.deephaven.plot.util.tables.TableBackedPartitionedTableHandle; import io.deephaven.plot.util.tables.TableHandle; import io.deephaven.time.DateTimeUtils; -import io.deephaven.util.SafeCloseable; import java.time.Instant; import java.util.ArrayList; -public class TestOHLCDataSeries extends BaseArrayTestCase { +public class TestOHLCDataSeries extends RefreshingTableTestCase { private final Instant[] datesA = { DateTimeUtils.epochNanosToInstant(DateTimeUtils.DAY), DateTimeUtils.epochNanosToInstant(2 * DateTimeUtils.DAY), @@ -44,20 +42,6 @@ public class TestOHLCDataSeries extends BaseArrayTestCase { private final OHLCDataSeriesInternal dataSeries2 = new OHLCDataSeriesArray( new BaseFigureImpl().newChart().newAxes(), 1, "Test2", dates, close, high, low, open); - private SafeCloseable executionContext; - - @Override - public void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - 
executionContext.close(); - } - public void testOHLCDataSeriesArray() { checkOHLCDataSeriesArray(dataSeries, datesA, openA, highA, lowA, closeA); checkOHLCDataSeriesArray(dataSeries2, datesA, closeA, highA, lowA, openA); diff --git a/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestAbstractXYDataSeries.java b/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestAbstractXYDataSeries.java index a0860a73bf7..8d7acc93aee 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestAbstractXYDataSeries.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestAbstractXYDataSeries.java @@ -3,9 +3,8 @@ */ package io.deephaven.plot.datasets.xy; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.gui.color.Color; import io.deephaven.gui.shape.JShapes; @@ -17,7 +16,6 @@ import io.deephaven.plot.datasets.data.IndexableDataArray; import io.deephaven.plot.datasets.data.IndexableDataInteger; import io.deephaven.plot.util.PlotUtils; -import io.deephaven.util.SafeCloseable; import junit.framework.TestCase; import java.util.Collections; @@ -25,7 +23,7 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; -public class TestAbstractXYDataSeries extends BaseArrayTestCase { +public class TestAbstractXYDataSeries extends RefreshingTableTestCase { private static class TestAXYDS extends AbstractXYDataSeries { @@ -59,20 +57,6 @@ public int size() { } } - private SafeCloseable executionContext; - - @Override - public void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } - public void testVisibility() { final TestAXYDS data = new TestAXYDS(); diff 
--git a/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestXYDataSeriesArray.java b/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestXYDataSeriesArray.java index 2581a1248b3..6d709befef4 100644 --- a/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestXYDataSeriesArray.java +++ b/Plot/src/test/java/io/deephaven/plot/datasets/xy/TestXYDataSeriesArray.java @@ -3,9 +3,8 @@ */ package io.deephaven.plot.datasets.xy; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.gui.color.Color; import io.deephaven.plot.BaseFigureImpl; @@ -17,26 +16,11 @@ import io.deephaven.plot.util.tables.SwappableTable; import io.deephaven.plot.util.tables.TableBackedPartitionedTableHandle; import io.deephaven.plot.util.tables.TableHandle; -import io.deephaven.util.SafeCloseable; import junit.framework.TestCase; import java.util.ArrayList; -public class TestXYDataSeriesArray extends BaseArrayTestCase { - - private SafeCloseable executionContext; - - @Override - public void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } +public class TestXYDataSeriesArray extends RefreshingTableTestCase { public void testXYDataSeriesArray() { ChartImpl chart = new BaseFigureImpl().newChart(); diff --git a/Plot/src/test/java/io/deephaven/plot/util/TestArgumentValidations.java b/Plot/src/test/java/io/deephaven/plot/util/TestArgumentValidations.java index aa0a63bce14..0460e7f6efe 100644 --- a/Plot/src/test/java/io/deephaven/plot/util/TestArgumentValidations.java +++ b/Plot/src/test/java/io/deephaven/plot/util/TestArgumentValidations.java @@ -3,7 +3,7 @@ */ package io.deephaven.plot.util; -import 
io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.gui.color.Color; import io.deephaven.plot.datasets.data.IndexableNumericData; import io.deephaven.plot.datasets.data.IndexableNumericDataArrayInt; @@ -12,12 +12,22 @@ import io.deephaven.engine.util.TableTools; import io.deephaven.time.DateTimeUtils; import junit.framework.TestCase; +import org.junit.Rule; +import org.junit.Test; import java.time.Instant; import java.util.Date; -public class TestArgumentValidations extends BaseArrayTestCase { +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertFalse; +import static junit.framework.TestCase.assertTrue; +public class TestArgumentValidations { + + @Rule + final public EngineCleanup framework = new EngineCleanup(); + + @Test public void testArgumentValidations() { final String NON_NULL = "TEST"; final String message = "message"; @@ -247,6 +257,7 @@ public void testArgumentValidations() { } } + @Test public void testNaNSafeEquals() { assertTrue(ArgumentValidations.nanSafeEquals(5, 5)); assertFalse(ArgumentValidations.nanSafeEquals(4, 5)); diff --git a/Plot/src/test/java/io/deephaven/plot/util/tables/TestColumnHandlerFactory.java b/Plot/src/test/java/io/deephaven/plot/util/tables/TestColumnHandlerFactory.java index ae92ae24329..f4b20fbb78b 100644 --- a/Plot/src/test/java/io/deephaven/plot/util/tables/TestColumnHandlerFactory.java +++ b/Plot/src/test/java/io/deephaven/plot/util/tables/TestColumnHandlerFactory.java @@ -3,7 +3,7 @@ */ package io.deephaven.plot.util.tables; -import io.deephaven.base.testing.BaseArrayTestCase; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.plot.errors.PlotIllegalArgumentException; import io.deephaven.gui.color.Color; import io.deephaven.gui.color.Paint; @@ -11,13 +11,20 @@ import io.deephaven.engine.util.TableTools; import io.deephaven.time.DateTimeUtils; import junit.framework.TestCase; 
+import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; import java.time.Instant; import java.util.Date; import static io.deephaven.util.QueryConstants.*; +import static junit.framework.TestCase.*; -public class TestColumnHandlerFactory extends BaseArrayTestCase { +public class TestColumnHandlerFactory { + + @Rule + final public EngineCleanup framework = new EngineCleanup(); private final int[] ints = {NULL_INT, 2, 3}; private final float[] floats = {NULL_FLOAT, 2, 3}; @@ -35,28 +42,34 @@ public class TestColumnHandlerFactory extends BaseArrayTestCase { null, DateTimeUtils.epochNanosToInstant(1), DateTimeUtils.epochNanosToInstant(2)}; private final Paint[] paints = {null, new Color(100, 0, 0), new Color(0, 100, 0)}; private final String[] strings = {"A", "B", "C"}; - private final Table table = TableTools.newTable( - TableTools.intCol("ints", ints), - TableTools.floatCol("floats", floats), - TableTools.longCol("longs", longs), - TableTools.doubleCol("doubles", doubles), - TableTools.shortCol("shorts", shorts), - TableTools.col("Shorts", Shorts), - TableTools.col("Integers", Integers), - TableTools.col("Longs", Longs), - TableTools.col("Floats", Floats), - TableTools.col("Doubles", Doubles), - TableTools.col("Numbers", Numbers), - TableTools.col("Dates", dates), - TableTools.col("Instants", instants), - TableTools.col("Paints", paints), - TableTools.col("Strings", strings)).ungroup(); - - private final TableHandle tableHandle = new TableHandle(table, - "ints", "floats", "longs", "doubles", "shorts", "Shorts", "Integers", "Longs", "Floats", "Doubles", - "Numbers", "Dates", "Instants", "Paints", "Strings"); + private Table table; + private TableHandle tableHandle; + + @Before + public void setUp() { + table = TableTools.newTable( + TableTools.intCol("ints", ints), + TableTools.floatCol("floats", floats), + TableTools.longCol("longs", longs), + TableTools.doubleCol("doubles", doubles), + TableTools.shortCol("shorts", shorts), + 
TableTools.col("Shorts", Shorts), + TableTools.col("Integers", Integers), + TableTools.col("Longs", Longs), + TableTools.col("Floats", Floats), + TableTools.col("Doubles", Doubles), + TableTools.col("Numbers", Numbers), + TableTools.col("Dates", dates), + TableTools.col("Instants", instants), + TableTools.col("Paints", paints), + TableTools.col("Strings", strings)).ungroup(); + tableHandle = new TableHandle(table, + "ints", "floats", "longs", "doubles", "shorts", "Shorts", "Integers", "Longs", "Floats", "Doubles", + "Numbers", "Dates", "Instants", "Paints", "Strings"); + } + @Test public void testTypeClassification() { assertTrue(ColumnHandlerFactory.TypeClassification.INTEGER.isNumeric()); assertTrue(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT.isNumeric()); @@ -66,6 +79,7 @@ public void testTypeClassification() { assertFalse(ColumnHandlerFactory.TypeClassification.OBJECT.isNumeric()); } + @Test public void testNumericColumnHandlerHandle() { try { ColumnHandlerFactory.newNumericHandler(tableHandle, null, null); @@ -146,6 +160,7 @@ public void testNumericColumnHandlerHandle() { } } + @Test public void testNumericColumnHandlerTable() { try { ColumnHandlerFactory.newNumericHandler(table, null, null); @@ -231,6 +246,7 @@ public void testNumericColumnHandlerTable() { } } + @Test public void testComparableHandlerHandle() { try { ColumnHandlerFactory.newComparableHandler(tableHandle, null, null); @@ -272,6 +288,7 @@ public void testComparableHandlerHandle() { } } + @Test public void testComparableHandlerTable() { try { ColumnHandlerFactory.newComparableHandler(table, null, null); @@ -312,6 +329,7 @@ public void testComparableHandlerTable() { } } + @Test public void testObjectHandlerHandle() { try { ColumnHandlerFactory.newObjectHandler(tableHandle, null, null); @@ -346,6 +364,7 @@ public void testObjectHandlerHandle() { } } + @Test public void testObjectHandlerTable() { try { ColumnHandlerFactory.newObjectHandler(table, null, null); diff --git 
a/Util/src/main/java/io/deephaven/util/ExecutionContextRegistrationException.java b/Util/src/main/java/io/deephaven/util/ExecutionContextRegistrationException.java new file mode 100644 index 00000000000..9816ac6278b --- /dev/null +++ b/Util/src/main/java/io/deephaven/util/ExecutionContextRegistrationException.java @@ -0,0 +1,28 @@ +package io.deephaven.util; + +import io.deephaven.UncheckedDeephavenException; +import io.deephaven.internal.log.LoggerFactory; +import io.deephaven.io.logger.Logger; +import org.jetbrains.annotations.NotNull; + +/** + * This exception is thrown when the {@link ThreadLocal} ExecutionContext or any of its components are accessed if they + * have not been properly initialized. + */ +public final class ExecutionContextRegistrationException extends UncheckedDeephavenException { + + private static final Logger logger = LoggerFactory.getLogger(ExecutionContextRegistrationException.class); + + public ExecutionContextRegistrationException(@NotNull final String missingComponent) { + super("No ExecutionContext registered, or current ExecutionContext has no " + missingComponent); + } + + public static ExecutionContextRegistrationException onFailedComponentAccess(@NotNull final String componentName) { + logger.error().append("No ExecutionContext registered, or current ExecutionContext has no ") + .append(componentName).append('.') + .append(" If this is being run in a thread, did you specify an ExecutionContext for the thread?") + .append(" Please refer to the documentation on ExecutionContext for details.") + .endl(); + return new ExecutionContextRegistrationException(componentName); + } +} diff --git a/Util/src/main/java/io/deephaven/util/NoExecutionContextRegisteredException.java b/Util/src/main/java/io/deephaven/util/NoExecutionContextRegisteredException.java deleted file mode 100644 index 347305a0693..00000000000 --- a/Util/src/main/java/io/deephaven/util/NoExecutionContextRegisteredException.java +++ /dev/null @@ -1,13 +0,0 @@ -package 
io.deephaven.util; - -import io.deephaven.UncheckedDeephavenException; - -/** - * This exception is thrown when the thread-local QueryScope, QueryLibrary, or QueryCompiler are accessed from user-code - * without an explicit ExecutionContext. - */ -public final class NoExecutionContextRegisteredException extends UncheckedDeephavenException { - public NoExecutionContextRegisteredException() { - super("ExecutionContext not registered"); - } -} diff --git a/Util/src/main/java/io/deephaven/util/thread/ThreadInitializationFactory.java b/Util/src/main/java/io/deephaven/util/thread/ThreadInitializationFactory.java index 2d5b963f4a2..dddb9c380fc 100644 --- a/Util/src/main/java/io/deephaven/util/thread/ThreadInitializationFactory.java +++ b/Util/src/main/java/io/deephaven/util/thread/ThreadInitializationFactory.java @@ -24,7 +24,7 @@ public interface ThreadInitializationFactory { } catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e) { throw new IllegalArgumentException( - "Error instantiating initializer " + type + ", please check configuration"); + "Error instantiating initializer " + type + ", please check configuration", e); } }) .collect(Collectors.toUnmodifiableList()); diff --git a/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java b/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java index b9b92aa3687..b424e179eaa 100644 --- a/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java +++ b/engine/api/src/main/java/io/deephaven/engine/table/ElementSource.java @@ -90,7 +90,7 @@ public interface ElementSource { /** * Get the previous value at the rowKey. Previous values are used during an - * {@link io.deephaven.engine.updategraph.UpdateGraphProcessor UGP} + * {@link io.deephaven.engine.updategraph.UpdateGraph UG} * {@link io.deephaven.engine.updategraph.LogicalClock.State#Updating update} cycle to process changes in data. 
* During {@link io.deephaven.engine.updategraph.LogicalClock.State#Idle normal} operation previous values will be * identical to {@link #get(long) current} values. RowKeys that were not present are undefined. diff --git a/engine/api/src/main/java/io/deephaven/engine/table/ShiftObliviousListener.java b/engine/api/src/main/java/io/deephaven/engine/table/ShiftObliviousListener.java index 88cbadd6dd0..488a36bd67c 100644 --- a/engine/api/src/main/java/io/deephaven/engine/table/ShiftObliviousListener.java +++ b/engine/api/src/main/java/io/deephaven/engine/table/ShiftObliviousListener.java @@ -15,10 +15,10 @@ public interface ShiftObliviousListener extends TableListener { * Process notification of table changes. * *

- * The ShiftObliviousListener onUpdate call executes within the - * {@link io.deephaven.engine.updategraph.UpdateGraphProcessor} refresh loop. Any tables used within the listener's - * onUpdate call must have already been refreshed. Using the typical pattern of a Listener that is listening to a - * single table, with {@link Table#addUpdateListener(ShiftObliviousListener)}, this is trivially true. + * The ShiftObliviousListener onUpdate call executes within the {@link io.deephaven.engine.updategraph.UpdateGraph} + * refresh loop. Any tables used within the listener's onUpdate call must have already been refreshed. Using the + * typical pattern of a Listener that is listening to a single table, with + * {@link Table#addUpdateListener(ShiftObliviousListener)}, this is trivially true. *

* *

diff --git a/engine/api/src/main/java/io/deephaven/engine/table/Table.java b/engine/api/src/main/java/io/deephaven/engine/table/Table.java index c93326d0e42..ddee5f6df00 100644 --- a/engine/api/src/main/java/io/deephaven/engine/table/Table.java +++ b/engine/api/src/main/java/io/deephaven/engine/table/Table.java @@ -684,31 +684,33 @@ RollupTable rollup(Collection aggregations, boolean inclu // Methods for refreshing tables // ----------------------------------------------------------------------------------------------------------------- - // TODO (https://github.com/deephaven/deephaven-core/pull/3506): Update this advice for multiple update graphs, - // and on the other overloads, as well. /** *

* Wait for updates to this Table. Should not be invoked from a {@link TableListener} or other - * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}. + * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification} on this Table's + * {@link #getUpdateGraph() update graph}. It may be suitable to wait from another update graph if doing so does not + * introduce any cycles. *

- * In some implementations, this call may also terminate in case of interrupt or spurious wakeup (see - * java.util.concurrent.locks.Condition#await()). + * In some implementations, this call may also terminate in case of interrupt or spurious wakeup. * * @throws InterruptedException In the event this thread is interrupted + * @see java.util.concurrent.locks.Condition#await() */ void awaitUpdate() throws InterruptedException; /** *

* Wait for updates to this Table. Should not be invoked from a {@link TableListener} or other - * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}. + * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification} on this Table's + * {@link #getUpdateGraph() update graph}. It may be suitable to wait from another update graph if doing so does not + * introduce any cycles. *

- * In some implementations, this call may also terminate in case of interrupt or spurious wakeup (see - * java.util.concurrent.locks.Condition#await()). + * In some implementations, this call may also terminate in case of interrupt or spurious wakeup. * * @param timeout The maximum time to wait in milliseconds. * @return false if the timeout elapses without notification, true otherwise. * @throws InterruptedException In the event this thread is interrupted + * @see java.util.concurrent.locks.Condition#await() */ boolean awaitUpdate(long timeout) throws InterruptedException; diff --git a/engine/api/src/main/java/io/deephaven/engine/table/TableUpdateListener.java b/engine/api/src/main/java/io/deephaven/engine/table/TableUpdateListener.java index 13e0f09fdb8..e0a49f4453d 100644 --- a/engine/api/src/main/java/io/deephaven/engine/table/TableUpdateListener.java +++ b/engine/api/src/main/java/io/deephaven/engine/table/TableUpdateListener.java @@ -14,10 +14,10 @@ public interface TableUpdateListener extends TableListener { * Process notification of table changes. * *

- * The TableUpdateListener onUpdate call executes within the - * {@link io.deephaven.engine.updategraph.UpdateGraphProcessor} refresh loop. Any tables used within the listener's - * onUpdate call must have already been refreshed. Using the typical pattern of a Listener that is listening to a - * single table, with {@link Table#addUpdateListener(TableUpdateListener)}, this is trivially true. + * The TableUpdateListener onUpdate call executes within the {@link io.deephaven.engine.updategraph.UpdateGraph} + * refresh loop. Any tables used within the listener's onUpdate call must have already been refreshed. Using the + * typical pattern of a Listener that is listening to a single table, with + * {@link Table#addUpdateListener(TableUpdateListener)}, this is trivially true. *

* *

diff --git a/engine/api/src/test/java/io/deephaven/engine/table/iterators/TestColumnIterators.java b/engine/api/src/test/java/io/deephaven/engine/table/iterators/TestColumnIterators.java index 30da4e17f3d..bd9e0fdfb2b 100644 --- a/engine/api/src/test/java/io/deephaven/engine/table/iterators/TestColumnIterators.java +++ b/engine/api/src/test/java/io/deephaven/engine/table/iterators/TestColumnIterators.java @@ -7,10 +7,9 @@ import io.deephaven.engine.testutil.ColumnInfo; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.*; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.apache.commons.lang3.mutable.MutableInt; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.*; import java.util.Objects; import java.util.Random; @@ -24,10 +23,13 @@ */ public class TestColumnIterators { - private static Table input; + @Rule + public final EngineCleanup framework = new EngineCleanup(); - @BeforeClass - public static void setUpOnce() { + private Table input; + + @Before + public void setUp() { input = TstUtils.getTable(false, 100_000, new Random(0), new ColumnInfo[] { new ColumnInfo<>(new CharGenerator('A', 'z', 0.1), "CharCol"), new ColumnInfo<>(new ByteGenerator((byte) -100, (byte) 100, 0.1), "ByteCol"), @@ -41,8 +43,8 @@ public static void setUpOnce() { }); } - @AfterClass - public static void tearDownOnce() { + @After + public void tearDown() { input = null; } diff --git a/engine/benchmark/build.gradle b/engine/benchmark/build.gradle index 0b0a127f357..ec1d1ef31d3 100644 --- a/engine/benchmark/build.gradle +++ b/engine/benchmark/build.gradle @@ -58,7 +58,6 @@ task jmhRun(type: JavaExec) { '-Dconfiguration.quiet=true', '-Djava.awt.headless=true', '-DQueryTable.memoizeResults=false', - '-DUpdateGraphProcessor.checkTableOperations=false', '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=9500' } @@ -77,7 +76,6 @@ def createJmhTask = { 
'-Dconfiguration.quiet=true', '-Djava.awt.headless=true', '-DQueryTable.memoizeResults=false', - '-DUpdateGraphProcessor.checkTableOperations=false', "-Xmx$heapSize" //'-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=9501' ] diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/AjBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/AjBenchmark.java index 68cca63596f..a5310776b39 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/AjBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/AjBenchmark.java @@ -3,8 +3,10 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.util.metrics.MetricsManager; import io.deephaven.benchmarking.*; @@ -60,7 +62,8 @@ public class AjBenchmark { public void setupEnv(BenchmarkParams params) { System.out.println("Setup started: " + new Date()); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final BenchmarkTableBuilder rightBuilder; final BenchmarkTableBuilder leftBuilder; @@ -169,7 +172,7 @@ public Table ajStatic(Blackhole bh) { if (buckets == 0) { throw new UnsupportedOperationException("Buckets must be positive!"); } - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock() + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> leftTable.aj(rightTable, joinKeyName + ",LeftStamp>=RightStamp", "RightSentinel")); return doFingerPrint(result, bh); } @@ -180,7 +183,7 @@ public Table 
ajLeftIncremental(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked( () -> lt.aj(rightTable, joinKeyName + ",LeftStamp>=RightStamp", "RightSentinel")), leftTable); @@ -193,7 +196,7 @@ public Table ajLeftIncrementalSmallSteps(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked( () -> lt.aj(rightTable, joinKeyName + ",LeftStamp>=RightStamp", "RightSentinel")), leftTable, 100); @@ -206,7 +209,7 @@ public Table ajLeftIncrementalTinySteps(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked( () -> lt.aj(rightTable, joinKeyName + ",LeftStamp>=RightStamp", "RightSentinel")), leftTable, 1000); @@ -219,7 +222,7 @@ public Table ajRightIncremental(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (rt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> leftTable.aj(rt, joinKeyName + ",LeftStamp>=RightStamp", "RightSentinel")), rightTable); return doFingerPrint(result, bh); @@ -230,7 +233,7 @@ public Table ajZkStatic(Blackhole bh) { if (buckets != 0) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } - final Table result = 
UpdateGraphProcessor.DEFAULT.sharedLock() + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> leftTable.aj(rightTable, "LeftStamp>=RightStamp", "RightSentinel")); return doFingerPrint(result, bh); } @@ -242,7 +245,7 @@ public Table ajZkLeftIncremental(Blackhole bh) { } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> lt.aj(rightTable, "LeftStamp>=RightStamp", "RightSentinel")), leftTable); return doFingerPrint(result, bh); @@ -255,7 +258,7 @@ public Table ajZkRightIncremental(Blackhole bh) { } final Table result = IncrementalBenchmark.incrementalBenchmark( - (rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (rt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> leftTable.aj(rt, "LeftStamp>=RightStamp", "RightSentinel")), rightTable); return doFingerPrint(result, bh); @@ -267,7 +270,7 @@ public Table ajZkIncremental(Blackhole bh) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt, rt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> lt.aj(rt, "LeftStamp>=RightStamp", "RightSentinel")), leftTable, rightTable); return doFingerPrint(result, bh); @@ -279,7 +282,7 @@ public Table ajZkIncrementalStartup(Blackhole bh) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt, rt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> lt.aj(rt, "LeftStamp>=RightStamp", "RightSentinel")), leftTable, rightTable, 0.95, 1); return doFingerPrint(result, bh); @@ -291,7 
+294,7 @@ public Table ajZkIncrementalSmallSteps(Blackhole bh) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt, rt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> lt.aj(rt, "LeftStamp>=RightStamp", "RightSentinel")), leftTable, rightTable, 0.1, 100); return doFingerPrint(result, bh); @@ -304,7 +307,7 @@ public Table ajIncrementalSmallSteps(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt, rt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> lt.aj(rt, joinKeyName + ",LeftStamp>=RightStamp", "RightSentinel")), leftTable, rightTable, 0.1, 100); return doFingerPrint(result, bh); @@ -316,7 +319,7 @@ public Table ajIncremental(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() + (lt, rt) -> ExecutionContext.getContext().getUpdateGraph().sharedLock() .computeLocked(() -> lt.aj(rt, joinKeyName + ",LeftStamp>=RightStamp", "RightSentinel")), leftTable, rightTable); return doFingerPrint(result, bh); diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/ConditionFilterMultipleColumnsBench.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/ConditionFilterMultipleColumnsBench.java index 5907ee1d16b..f283b729977 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/ConditionFilterMultipleColumnsBench.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/ConditionFilterMultipleColumnsBench.java @@ -3,13 +3,15 @@ */ package 
io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.ConditionFilter; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.engine.table.impl.select.WhereFilter; import io.deephaven.benchmarking.*; import io.deephaven.benchmarking.runner.TableBenchmarkState; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.infra.BenchmarkParams; import org.openjdk.jmh.infra.Blackhole; @@ -51,7 +53,8 @@ public void setupEnv(final BenchmarkParams params) { if (nFilterCols < 1 || nAdditionalCols < 0) { throw new IllegalArgumentException(); } - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); final BenchmarkTableBuilder builder; @@ -85,7 +88,7 @@ public void setupEnv(final BenchmarkParams params) { final BenchmarkTable bmTable = builder.build(); final Table t = bmTable.getTable(); if (doSelect) { - inputTable = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( + inputTable = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( () -> t.select(tCols).sort(sortCol).coalesce()); } else { inputTable = t.sort(sortCol).coalesce(); @@ -115,11 +118,12 @@ public void setupInvocation() { final Table result = inputReleased.where(filter); // Compute the first pass of live iterations outside of the bench measurement, // to avoid including the time to setup the filter itself. 
- UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); final long fullyReleasedSize = inputTable.size(); bench = () -> { while (inputReleased.size() < fullyReleasedSize) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } return result; }; diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/GroupByBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/GroupByBenchmark.java index f5d8c1ed980..8ad1b0f1ab5 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/GroupByBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/GroupByBenchmark.java @@ -3,9 +3,11 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.benchmarking.*; @@ -57,7 +59,8 @@ public class GroupByBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); QueryTable.setMemoizeResults(false); final BenchmarkTableBuilder builder; @@ -153,26 +156,26 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table byStatic(@NotNull final 
Blackhole bh) { - final Table result = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.groupBy(keyName.split("[, ]+"))); + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> table.groupBy(keyName.split("[, ]+"))); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table byIncremental(@NotNull final Blackhole bh) { - final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.groupBy(keyName.split("[, ]+"))), - table); + final Table result = IncrementalBenchmark.incrementalBenchmark((t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> t.groupBy(keyName.split("[, ]+"))); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table partitionByStatic(@NotNull final Blackhole bh) { - final PartitionedTable result = - UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.partitionBy(keyName.split("[, ]+"))); + final PartitionedTable result = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> table.partitionBy(keyName.split("[, ]+"))); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -180,8 +183,10 @@ public Table partitionByStatic(@NotNull final Blackhole bh) { @Benchmark public Table partitionByIncremental(@NotNull final Blackhole bh) { final PartitionedTable result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> t.partitionBy(keyName.split("[, ]+"))), + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.partitionBy(keyName.split("[, ]+"))); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); diff --git 
a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/IncrementalBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/IncrementalBenchmark.java index 61a8851ab1d..ef166235545 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/IncrementalBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/IncrementalBenchmark.java @@ -3,9 +3,10 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableUpdate; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.updategraph.DynamicNode; import io.deephaven.engine.table.impl.InstrumentedTableUpdateListenerAdapter; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; @@ -23,10 +24,11 @@ static R incrementalBenchmark(final Function function, final Table final R result = function.apply(filtered); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); while (filtered.size() < inputTable.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } return result; @@ -39,10 +41,11 @@ static R rollingBenchmark(final Function function, final Table inp final R result = function.apply(filtered); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); for (int currentStep = 0; currentStep <= steps; currentStep++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + 
updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } return result; @@ -97,11 +100,12 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { failureListener = null; } - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); while (filtered1.size() < inputTable1.size() || filtered2.size() < inputTable2.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter1::run); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter2::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter1::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter2::run); } if (failureListener != null) { diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/LastByBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/LastByBenchmark.java index 9b568631bb0..499cd890744 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/LastByBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/LastByBenchmark.java @@ -5,8 +5,10 @@ import io.deephaven.api.agg.Aggregation; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.benchmarking.*; @@ -67,7 +69,8 @@ public class LastByBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + 
ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); QueryTable.setMemoizeResults(false); final BenchmarkTableBuilder builder; @@ -192,8 +195,8 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table lastByStatic(@NotNull final Blackhole bh) { - final Table result = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.lastBy(keyColumnNames)); + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> table.lastBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -201,7 +204,10 @@ public Table lastByStatic(@NotNull final Blackhole bh) { @Benchmark public Table lastByIncremental(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.lastBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.lastBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -209,7 +215,10 @@ public Table lastByIncremental(@NotNull final Blackhole bh) { @Benchmark public Table lastByRolling(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.rollingBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.lastBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.lastBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -221,9 +230,10 @@ public Table lastFirstByStatic(@NotNull final Blackhole bh) { final Aggregation firstCols = AggFirst(IntStream.range(1, valueCount + 1) .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = 
IncrementalBenchmark.rollingBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> t.aggBy(List.of(lastCols, firstCols), keyColumnNames)), + final Table result = IncrementalBenchmark.rollingBenchmark((t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> t.aggBy(List.of(lastCols, firstCols), keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); @@ -236,9 +246,10 @@ public Table lastFirstByIncremental(@NotNull final Blackhole bh) { final Aggregation firstCols = AggFirst(IntStream.range(1, valueCount + 1) .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> t.aggBy(List.of(lastCols, firstCols), keyColumnNames)), + final Table result = IncrementalBenchmark.rollingBenchmark((t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> t.aggBy(List.of(lastCols, firstCols), keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); @@ -251,9 +262,10 @@ public Table lastFirstByRolling(@NotNull final Blackhole bh) { final Aggregation firstCols = AggFirst(IntStream.range(1, valueCount + 1) .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> t.aggBy(List.of(lastCols, firstCols), keyColumnNames)), + final Table result = IncrementalBenchmark.rollingBenchmark((t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> t.aggBy(List.of(lastCols, firstCols), keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); diff --git 
a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/MatchFilterBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/MatchFilterBenchmark.java index b47b0737404..e6d68ef7c7d 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/MatchFilterBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/MatchFilterBenchmark.java @@ -3,8 +3,10 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.time.DateTimeUtils; import io.deephaven.engine.table.impl.select.*; import io.deephaven.benchmarking.*; @@ -54,7 +56,8 @@ public class MatchFilterBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final BenchmarkTableBuilder builder; final int actualSize = BenchmarkTools.sizeWithSparsity(tableSize, sparsity); @@ -143,10 +146,11 @@ private R incrementalBenchmark(Function function) { final R result = function.apply(filtered); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); while (filtered.size() < inputTable.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } return result; diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinBenchmark.java 
b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinBenchmark.java index 6be758c9484..085a1b16fac 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinBenchmark.java @@ -3,8 +3,10 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.util.metrics.MetricsManager; import io.deephaven.benchmarking.*; import io.deephaven.benchmarking.generator.ColumnGenerator; @@ -50,7 +52,8 @@ public class NaturalJoinBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final BenchmarkTableBuilder rightBuilder; final BenchmarkTableBuilder leftBuilder; @@ -159,16 +162,17 @@ public void tearDownInvocation() { @Benchmark public Table naturalJoinStatic() { - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> leftTable.naturalJoin(rightTable, joinKeyName)); + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> leftTable.naturalJoin(rightTable, joinKeyName)); return state.setResult(result); } @Benchmark public Table naturalJoinIncremental() { - final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> lt.naturalJoin(rt, joinKeyName)), + final Table result = IncrementalBenchmark.incrementalBenchmark((lt, rt) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() 
+ .computeLocked(() -> lt.naturalJoin(rt, joinKeyName)); + }, leftTable, rightTable); return state.setResult(result); } diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsBench.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsBench.java index eeb6b071a9f..4faaa4bc2cd 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsBench.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsBench.java @@ -3,10 +3,12 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.benchmarking.*; import io.deephaven.benchmarking.runner.TableBenchmarkState; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.infra.BenchmarkParams; import org.openjdk.jmh.infra.Blackhole; @@ -52,7 +54,8 @@ public void setupEnv(final BenchmarkParams params) { + t1NumberOfAdditionalColumns + ") have to be >= 1."); } state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final BenchmarkTableBuilder builder1; final String t1PartCol = "T1PartCol"; builder1 = BenchmarkTools.persistentTableBuilder("T1", tableSize); @@ -133,10 +136,11 @@ public void finishIteration(BenchmarkParams params) throws IOException { public Table naturalJoinBench(final Blackhole bh) { final Table result; if (doSelect) { - result = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> 
IncrementalBenchmark - .incrementalBenchmark((Table t) -> t.select(t1Cols).sort(sortCol).naturalJoin( - t2, joinColsStr, joinColumnsToAddStr), inputTable, steps)); + result = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(() -> IncrementalBenchmark.incrementalBenchmark( + (Table t) -> t.select(t1Cols).sort(sortCol).naturalJoin( + t2, joinColsStr, joinColumnsToAddStr), + inputTable, steps)); } else { result = IncrementalBenchmark.incrementalBenchmark((Table t) -> t.sort(sortCol).naturalJoin( t2, joinColsStr, joinColumnsToAddStr), inputTable, steps); diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsFillChunkBench.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsFillChunkBench.java index 5ba9428698b..9d83fed0861 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsFillChunkBench.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/NaturalJoinMultipleColumnsFillChunkBench.java @@ -3,8 +3,8 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.chunk.WritableLongChunk; import io.deephaven.benchmarking.BenchUtil; @@ -104,7 +104,7 @@ protected QueryData getQuery() { final String joinColsStr = String.join(",", joinCols); final String joinColumnsToAddStr = String.join(",", joinColumnsToAdd); if (doSelect) { - live = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( + live = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( () -> t1Released.select(t1Cols).sort(sortCol).naturalJoin(t2, joinColsStr, joinColumnsToAddStr)); } else { live = t1Released.sort(sortCol).naturalJoin(t2, joinColsStr, 
joinColumnsToAddStr); diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/PercentileByBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/PercentileByBenchmark.java index 014ff852e53..c029222a687 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/PercentileByBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/PercentileByBenchmark.java @@ -5,8 +5,10 @@ import io.deephaven.api.agg.spec.AggSpec; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.benchmarking.*; @@ -64,7 +66,8 @@ public class PercentileByBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); QueryTable.setMemoizeResults(false); final BenchmarkTableBuilder builder; @@ -188,7 +191,8 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table percentileByStatic(@NotNull final Blackhole bh) { final Function fut = getFunction(); - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> fut.apply(table)); + final Table result = + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> fut.apply(table)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -210,7 +214,10 @@ private Function getFunction() { public Table percentileByIncremental(@NotNull final Blackhole bh) { final Function 
fut = getFunction(); final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> fut.apply(t)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> fut.apply(t)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RangeFilterBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RangeFilterBenchmark.java index c435bfe2f68..4aa1d2f3503 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RangeFilterBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RangeFilterBenchmark.java @@ -3,8 +3,10 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.time.DateTimeUtils; import io.deephaven.engine.table.impl.select.*; import io.deephaven.benchmarking.*; @@ -49,7 +51,8 @@ public class RangeFilterBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final BenchmarkTableBuilder builder; final int actualSize = BenchmarkTools.sizeWithSparsity(tableSize, sparsity); @@ -159,10 +162,11 @@ private R incrementalBenchmark(Function function) { final R result = function.apply(filtered); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); 
while (filtered.size() < inputTable.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } return result; diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectedColumnSourceBench.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectedColumnSourceBench.java index 748d440033f..4c33d7552a5 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectedColumnSourceBench.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectedColumnSourceBench.java @@ -4,8 +4,8 @@ package io.deephaven.benchmark.engine; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.chunk.*; import io.deephaven.benchmarking.BenchUtil; @@ -114,8 +114,8 @@ protected QueryData getQuery() { new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table live; if (doSelect) { - live = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> t.where(incrementalReleaseFilter).select(selectCols).sort(sortCol)); + live = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> t.where(incrementalReleaseFilter).select(selectCols).sort(sortCol)); } else { live = t.where(incrementalReleaseFilter).sort(sortCol); } diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectionBenchBase.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectionBenchBase.java index 68706ebf759..315ae02079a 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectionBenchBase.java +++ 
b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RedirectionBenchBase.java @@ -5,8 +5,9 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.WritableChunk; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; @@ -15,6 +16,7 @@ import io.deephaven.benchmarking.BenchmarkTools; import io.deephaven.benchmarking.runner.TableBenchmarkState; import io.deephaven.engine.rowset.RowSet; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Level; import org.openjdk.jmh.annotations.Setup; @@ -67,13 +69,15 @@ public void setupEnv(final BenchmarkParams params) { chunkCapacity = Integer.parseInt(params.getParam("chunkCapacity")); skipResultsProcessing = Boolean.parseBoolean(params.getParam("skipResultsProcessing")); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); final QueryData queryData = getQuery(); for (int step = 0; step < queryData.steps; ++step) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(queryData.incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(queryData.incrementalReleaseFilter::run); } inputTable = queryData.live; nFillCols = queryData.fillCols.length; diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RegionedColumnSourceBenchmark.java 
b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RegionedColumnSourceBenchmark.java index cdae947a7aa..6ab5cae1fb4 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RegionedColumnSourceBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RegionedColumnSourceBenchmark.java @@ -4,8 +4,11 @@ package io.deephaven.benchmark.engine; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.ColumnSource; @@ -94,9 +97,10 @@ final void copy(@NotNull final ColumnSource columnSource, @NotNull final Writabl @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - Configuration.getInstance().setProperty(UpdateGraphProcessor.ALLOW_UNIT_TEST_MODE_PROP, "true"); + Configuration.getInstance().setProperty(PeriodicUpdateGraph.ALLOW_UNIT_TEST_MODE_PROP, "true"); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final BenchmarkTableBuilder builder; final int actualSize = BenchmarkTools.sizeWithSparsity(tableSize, sparsity); diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RowRedirectionBench.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RowRedirectionBench.java index 4d4ea641168..bac61ddaac7 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RowRedirectionBench.java +++ 
b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/RowRedirectionBench.java @@ -3,8 +3,8 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.chunk.WritableLongChunk; import io.deephaven.benchmarking.BenchUtil; @@ -62,13 +62,16 @@ protected QueryData getQuery() { new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table live; if (doSelect) { - live = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).select(joinCol, "PartCol1", "I1").sort("I1").naturalJoin( - t2, joinCol, "PartCol2")); + live = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> t1.where(incrementalReleaseFilter) + .select(joinCol, "PartCol1", "I1") + .sort("I1") + .naturalJoin(t2, joinCol, "PartCol2")); } else { - live = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).sort("I1").naturalJoin( - t2, joinCol, "PartCol2")); + live = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> t1.where(incrementalReleaseFilter) + .sort("I1") + .naturalJoin(t2, joinCol, "PartCol2")); } return new QueryData( live, diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SortBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SortBenchmark.java index 21a66425741..26a072ee07d 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SortBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SortBenchmark.java @@ -4,10 +4,11 @@ package io.deephaven.benchmark.engine; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; +import 
io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.rowset.TrackingWritableRowSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.TableUpdateImpl; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.SortHelpers; @@ -17,6 +18,7 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.benchmarking.*; import io.deephaven.benchmarking.generator.EnumStringColumnGenerator; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import org.apache.commons.lang3.mutable.MutableInt; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.infra.BenchmarkParams; @@ -78,7 +80,8 @@ public void setupEnv(BenchmarkParams params) { Assert.eqTrue(workingSize % sizePerStep == 0, "Cannot evenly divide working size by step size."); workingSizeInSteps = workingSize / sizePerStep; - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final int nVals = (int) (enumSize < 1 ? 
enumSize * tableSize : enumSize); System.out.println("String Values: " + nVals); @@ -153,7 +156,7 @@ public void setupEnv(BenchmarkParams params) { rollingInputTable.setRefreshing(true); rollingOutputTable = rollingInputTable.sort(sortCol); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); } private long currStep = 0; @@ -168,7 +171,8 @@ public Table incrementalSort() { } currStep = (currStep + 1) % numSteps; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); return incrementalTable; } @@ -176,7 +180,8 @@ public Table incrementalSort() { @Benchmark public Table rollingSort() { Assert.eq(rollingSortTable.size(), "result.size()", workingSize, "inputTable.size()"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(rollingReleaseFilter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(rollingReleaseFilter::run); return rollingSortTable; } @@ -200,7 +205,8 @@ public Table rollingWithModNoSort() { update.modifiedColumnSet = mcsWithoutSortColumn; update.shifted = RowSetShiftData.EMPTY; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { rollingInputRowSet.update(update.added(), update.removed()); rollingInputTable.notifyListeners(update); }); @@ -226,7 +232,8 @@ public Table rollingWithModSort() { update.modifiedColumnSet = mcsWithSortColumn; update.shifted = RowSetShiftData.EMPTY; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + 
updateGraph.runWithinUnitTestCycle(() -> { rollingInputRowSet.update(update.added(), update.removed()); rollingInputTable.notifyListeners(update); }); diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SparseSelectBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SparseSelectBenchmark.java index ecf5ecabe5f..e0119046016 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SparseSelectBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SparseSelectBenchmark.java @@ -4,14 +4,16 @@ package io.deephaven.benchmark.engine; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.SparseSelect; import io.deephaven.benchmarking.BenchUtil; import io.deephaven.benchmarking.BenchmarkTable; import io.deephaven.benchmarking.BenchmarkTableBuilder; import io.deephaven.benchmarking.BenchmarkTools; import io.deephaven.benchmarking.runner.TableBenchmarkState; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.infra.BenchmarkParams; @@ -43,7 +45,8 @@ public class SparseSelectBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final int actualSize = BenchmarkTools.sizeWithSparsity(tableSize, sparsity); @@ -90,7 +93,7 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table incrementalSparseSelect() { - final Table result = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( + final Table result = 
ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( () -> IncrementalBenchmark.incrementalBenchmark(SparseSelect::sparseSelect, inputTable, 10)); Assert.eq(result.size(), "result.size()", inputTable.size(), "inputTable.size()"); return state.setResult(result); @@ -98,9 +101,8 @@ public Table incrementalSparseSelect() { @Benchmark public Table sparseSelect() { - return state.setResult( - UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> SparseSelect.sparseSelect(inputTable))); + return state.setResult(ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> SparseSelect.sparseSelect(inputTable))); } public static void main(final String[] args) { diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SumByBenchmark.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SumByBenchmark.java index 3de8478c4fe..2f5bc33a407 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SumByBenchmark.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/SumByBenchmark.java @@ -5,8 +5,10 @@ import io.deephaven.api.agg.Aggregation; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.benchmarking.*; @@ -64,7 +66,8 @@ public class SumByBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + TestExecutionContext.createForUnitTests().open(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); QueryTable.setMemoizeResults(false); final 
BenchmarkTableBuilder builder; @@ -187,7 +190,8 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table sumByStatic(@NotNull final Blackhole bh) { - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.sumBy(keyColumnNames)); + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> table.sumBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -195,7 +199,10 @@ public Table sumByStatic(@NotNull final Blackhole bh) { @Benchmark public Table sumByIncremental(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.sumBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.sumBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -203,14 +210,18 @@ public Table sumByIncremental(@NotNull final Blackhole bh) { @Benchmark public Table sumByRolling(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.rollingBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.sumBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.sumBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table minByStatic(@NotNull final Blackhole bh) { - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.minBy(keyColumnNames)); + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> table.minBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -218,7 +229,10 @@ public Table 
minByStatic(@NotNull final Blackhole bh) { @Benchmark public Table minByIncremental(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.minBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.minBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -226,7 +240,10 @@ public Table minByIncremental(@NotNull final Blackhole bh) { @Benchmark public Table minByRolling(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.rollingBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.minBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.minBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -238,8 +255,8 @@ public Table minMaxByStatic(@NotNull final Blackhole bh) { final Aggregation maxCols = AggMax(IntStream.range(1, valueCount + 1) .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.aggBy(List.of(minCols, maxCols), keyColumnNames)); + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> table.aggBy(List.of(minCols, maxCols), keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -252,8 +269,10 @@ public Table minMaxByIncremental(@NotNull final Blackhole bh) { .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> t.aggBy(List.of(minCols, maxCols), keyColumnNames)), + (t) -> { + return 
ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> t.aggBy(List.of(minCols, maxCols), keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); @@ -267,8 +286,10 @@ public Table minMaxByRolling(@NotNull final Blackhole bh) { .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); final Table result = IncrementalBenchmark.rollingBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> t.aggBy(List.of(minCols, maxCols), keyColumnNames)), + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> t.aggBy(List.of(minCols, maxCols), keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); @@ -276,7 +297,8 @@ public Table minMaxByRolling(@NotNull final Blackhole bh) { @Benchmark public Table varByStatic(@NotNull final Blackhole bh) { - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.varBy(keyColumnNames)); + final Table result = ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> table.varBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -284,14 +306,18 @@ public Table varByStatic(@NotNull final Blackhole bh) { @Benchmark public Table varByIncremental(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.varBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.varBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table avgByStatic(@NotNull final Blackhole bh) { - final Table result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.avgBy(keyColumnNames)); + final Table 
result = ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> table.avgBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -299,7 +325,10 @@ public Table avgByStatic(@NotNull final Blackhole bh) { @Benchmark public Table avgByIncremental(@NotNull final Blackhole bh) { final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> t.avgBy(keyColumnNames)), table); + (t) -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock() + .computeLocked(() -> t.avgBy(keyColumnNames)); + }, table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/UngroupedColumnSourceBench.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/UngroupedColumnSourceBench.java index 0a31d662b4c..3b26cba5ae0 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/UngroupedColumnSourceBench.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/UngroupedColumnSourceBench.java @@ -3,8 +3,8 @@ */ package io.deephaven.benchmark.engine; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.chunk.WritableLongChunk; import io.deephaven.benchmarking.BenchUtil; @@ -62,13 +62,16 @@ protected QueryData getQuery() { new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table live; if (doSelect) { - live = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).select(joinCol, "PartCol1", "I1").sort("I1").join( - t2, joinCol, "PartCol2")); + live = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> 
t1.where(incrementalReleaseFilter) + .select(joinCol, "PartCol1", "I1") + .sort("I1") + .join(t2, joinCol, "PartCol2")); } else { - live = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).sort("I1").join( - t2, joinCol, "PartCol2")); + live = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> t1.where(incrementalReleaseFilter) + .sort("I1") + .join(t2, joinCol, "PartCol2")); } return new QueryData( live, diff --git a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/util/ShiftAwareBench.java b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/util/ShiftAwareBench.java index fc26e44f09a..0f6e40cc0a5 100644 --- a/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/util/ShiftAwareBench.java +++ b/engine/benchmark/src/benchmark/java/io/deephaven/benchmark/engine/util/ShiftAwareBench.java @@ -4,8 +4,9 @@ package io.deephaven.benchmark.engine.util; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.benchmarking.BenchmarkTable; @@ -56,7 +57,7 @@ public class ShiftAwareBench { public void setupEnv(BenchmarkParams params) { Configuration.getInstance().setProperty("QueryTable.memoizeResults", "false"); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("ShiftAwareBench", BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); @@ -92,10 +93,11 @@ private R incrementalBenchmark(Function function) { final R result = function.apply(filtered); - 
UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); while (filtered.size() < inputTable.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } return result; diff --git a/engine/context/build.gradle b/engine/context/build.gradle index c07295e33c5..f1c84595ed0 100644 --- a/engine/context/build.gradle +++ b/engine/context/build.gradle @@ -17,6 +17,7 @@ dependencies { implementation project(':log-factory') implementation project(':Util') implementation project(':engine-time') + implementation project(':engine-updategraph') implementation project(':table-api') implementation project(':IO') @@ -25,6 +26,7 @@ dependencies { Classpaths.inheritCommonsText(project, 'implementation') testImplementation TestTools.projectDependency(project, 'Base') + testImplementation project(':engine-test-utils') testRuntimeOnly project(':log-to-slf4j'), project(path: ':configs'), diff --git a/engine/context/src/main/java/io/deephaven/engine/context/ExecutionContext.java b/engine/context/src/main/java/io/deephaven/engine/context/ExecutionContext.java index 9b19b113433..05643ce6166 100644 --- a/engine/context/src/main/java/io/deephaven/engine/context/ExecutionContext.java +++ b/engine/context/src/main/java/io/deephaven/engine/context/ExecutionContext.java @@ -4,6 +4,7 @@ package io.deephaven.engine.context; import io.deephaven.auth.AuthContext; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.util.SafeCloseable; import io.deephaven.util.annotations.ScriptApi; import io.deephaven.util.annotations.VisibleForTesting; @@ -33,7 +34,9 @@ public static ExecutionContext getDefaultContext() { if ((localContext = defaultContext) == null) { synchronized (ExecutionContext.class) { if ((localContext = defaultContext) == null) { - 
localContext = defaultContext = new Builder(null).markSystemic().build(); + localContext = defaultContext = new Builder(null) + .markSystemic() + .build(); } } } @@ -79,18 +82,21 @@ private static void setContext(final ExecutionContext context) { private final QueryLibrary queryLibrary; private final QueryScope queryScope; private final QueryCompiler queryCompiler; + private final UpdateGraph updateGraph; private ExecutionContext( final boolean isSystemic, final AuthContext authContext, final QueryLibrary queryLibrary, final QueryScope queryScope, - final QueryCompiler queryCompiler) { + final QueryCompiler queryCompiler, + final UpdateGraph updateGraph) { this.isSystemic = isSystemic; this.authContext = authContext; this.queryLibrary = Objects.requireNonNull(queryLibrary); this.queryScope = Objects.requireNonNull(queryScope); this.queryCompiler = Objects.requireNonNull(queryCompiler); + this.updateGraph = updateGraph; } /** @@ -104,7 +110,7 @@ public ExecutionContext withSystemic(boolean isSystemic) { if (isSystemic == this.isSystemic) { return this; } - return new ExecutionContext(isSystemic, authContext, queryLibrary, queryScope, queryCompiler); + return new ExecutionContext(isSystemic, authContext, queryLibrary, queryScope, queryCompiler, updateGraph); } /** @@ -118,7 +124,21 @@ public ExecutionContext withAuthContext(final AuthContext authContext) { if (authContext == this.authContext) { return this; } - return new ExecutionContext(isSystemic, authContext, queryLibrary, queryScope, queryCompiler); + return new ExecutionContext(isSystemic, authContext, queryLibrary, queryScope, queryCompiler, updateGraph); + } + + /** + * Returns, or creates, an execution context with the given value for {@code updateGraph} and existing values for + * the other members. This is not intended to be used by user code. 
+ * + * @param updateGraph the update graph to use instead + * @return the execution context + */ + public ExecutionContext withUpdateGraph(final UpdateGraph updateGraph) { + if (updateGraph == this.updateGraph) { + return this; + } + return new ExecutionContext(isSystemic, authContext, queryLibrary, queryScope, queryCompiler, updateGraph); + } } /** @@ -174,6 +194,10 @@ public AuthContext getAuthContext() { return authContext; } + public UpdateGraph getUpdateGraph() { + return updateGraph; + } + @SuppressWarnings("unused") public static class Builder { private boolean isSystemic = false; @@ -183,6 +207,7 @@ public static class Builder { private QueryLibrary queryLibrary = PoisonedQueryLibrary.INSTANCE; private QueryScope queryScope = PoisonedQueryScope.INSTANCE; private QueryCompiler queryCompiler = PoisonedQueryCompiler.INSTANCE; + private UpdateGraph updateGraph = PoisonedUpdateGraph.INSTANCE; private Builder() { // propagate the auth context from the current context @@ -320,12 +345,30 @@ public Builder captureQueryScopeVars(String... vars) { return this; } + /** + * Use the provided UpdateGraph. + */ + @ScriptApi + public Builder setUpdateGraph(UpdateGraph updateGraph) { + this.updateGraph = updateGraph; + return this; + } + + /** + * Use the current ExecutionContext's UpdateGraph instance. 
+ */ + @ScriptApi + public Builder captureUpdateGraph() { + this.updateGraph = getContext().getUpdateGraph(); + return this; + } + /** * @return the newly instantiated ExecutionContext */ @ScriptApi public ExecutionContext build() { - return new ExecutionContext(isSystemic, authContext, queryLibrary, queryScope, queryCompiler); + return new ExecutionContext(isSystemic, authContext, queryLibrary, queryScope, queryCompiler, updateGraph); } } } diff --git a/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryCompiler.java b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryCompiler.java index 49b0eb02fcb..98cba3f36ee 100644 --- a/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryCompiler.java +++ b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryCompiler.java @@ -3,9 +3,7 @@ */ package io.deephaven.engine.context; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.deephaven.util.NoExecutionContextRegisteredException; +import io.deephaven.util.ExecutionContextRegistrationException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -13,16 +11,13 @@ import java.util.Map; public class PoisonedQueryCompiler extends QueryCompiler { - private static final Logger logger = LoggerFactory.getLogger(PoisonedQueryCompiler.class); + public static final PoisonedQueryCompiler INSTANCE = new PoisonedQueryCompiler(); private PoisonedQueryCompiler() {} private T fail() { - logger.error().append( - "No ExecutionContext provided, cannot use QueryCompiler. If this is being run in a thread, did you specify an ExecutionContext for the thread? 
Please refer to the documentation on ExecutionContext for details.") - .endl(); - throw new NoExecutionContextRegisteredException(); + throw ExecutionContextRegistrationException.onFailedComponentAccess("QueryCompiler"); } @Override diff --git a/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryLibrary.java b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryLibrary.java index d3045948706..4d4361f2c6f 100644 --- a/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryLibrary.java +++ b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryLibrary.java @@ -1,22 +1,17 @@ package io.deephaven.engine.context; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.deephaven.util.NoExecutionContextRegisteredException; +import io.deephaven.util.ExecutionContextRegistrationException; import java.util.Collection; public class PoisonedQueryLibrary extends QueryLibrary { - private static final Logger logger = LoggerFactory.getLogger(PoisonedQueryScope.class); + public static final PoisonedQueryLibrary INSTANCE = new PoisonedQueryLibrary(); private PoisonedQueryLibrary() {} private T fail() { - logger.error().append( - "No ExecutionContext provided, cannot use QueryLibrary. If this is being run in a thread, did you specify an ExecutionContext for the thread? 
Please refer to the documentation on ExecutionContext for details.") - .endl(); - throw new NoExecutionContextRegisteredException(); + throw ExecutionContextRegistrationException.onFailedComponentAccess("QueryLibrary"); } @Override diff --git a/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryScope.java b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryScope.java index 770babb91a5..dda0ad0a035 100644 --- a/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryScope.java +++ b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedQueryScope.java @@ -3,23 +3,18 @@ */ package io.deephaven.engine.context; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.deephaven.util.NoExecutionContextRegisteredException; +import io.deephaven.util.ExecutionContextRegistrationException; import java.util.Set; public class PoisonedQueryScope extends QueryScope { - private static final Logger logger = LoggerFactory.getLogger(PoisonedQueryScope.class); + public static final PoisonedQueryScope INSTANCE = new PoisonedQueryScope(); private PoisonedQueryScope() {} private T fail() { - logger.error().append( - "No ExecutionContext provided, cannot use QueryScope. If this is being run in a thread, did you specify an ExecutionContext for the thread? 
Please refer to the documentation on ExecutionContext for details.") - .endl(); - throw new NoExecutionContextRegisteredException(); + throw ExecutionContextRegistrationException.onFailedComponentAccess("QueryScope"); } @Override diff --git a/engine/context/src/main/java/io/deephaven/engine/context/PoisonedUpdateGraph.java b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedUpdateGraph.java new file mode 100644 index 00000000000..6c261f19f19 --- /dev/null +++ b/engine/context/src/main/java/io/deephaven/engine/context/PoisonedUpdateGraph.java @@ -0,0 +1,116 @@ +package io.deephaven.engine.context; + +import io.deephaven.base.log.LogOutput; +import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.engine.updategraph.LogicalClockImpl; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.io.log.LogEntry; +import io.deephaven.util.ExecutionContextRegistrationException; +import io.deephaven.util.locks.AwareFunctionalLock; +import org.jetbrains.annotations.NotNull; + +import java.util.Collection; + +public class PoisonedUpdateGraph implements UpdateGraph { + + public static final PoisonedUpdateGraph INSTANCE = new PoisonedUpdateGraph(); + + // this frozen clock is always Idle + private final LogicalClock frozenClock = () -> 1; + + private PoisonedUpdateGraph() {} + + private T fail() { + throw ExecutionContextRegistrationException.onFailedComponentAccess("UpdateGraph"); + } + + @Override + public LogOutput append(LogOutput logOutput) { + return logOutput.append("PoisonedUpdateGraph"); + } + + @Override + public boolean satisfied(long step) { + return fail(); + } + + @Override + public UpdateGraph getUpdateGraph() { + return this; + } + + @Override + public void addNotification(@NotNull Notification notification) { + fail(); + } + + @Override + public void addNotifications(@NotNull Collection notifications) { + fail(); + } + + @Override + public boolean maybeAddNotification(@NotNull Notification notification, long 
deliveryStep) { + return fail(); + } + + @Override + public AwareFunctionalLock sharedLock() { + return fail(); + } + + @Override + public AwareFunctionalLock exclusiveLock() { + return fail(); + } + + @Override + public LogicalClock clock() { + return frozenClock; + } + + @Override + public int parallelismFactor() { + return fail(); + } + + @Override + public LogEntry logDependencies() { + return fail(); + } + + @Override + public boolean currentThreadProcessesUpdates() { + return fail(); + } + + @Override + public boolean serialTableOperationsSafe() { + return fail(); + } + + @Override + public boolean setSerialTableOperationsSafe(boolean newValue) { + return fail(); + } + + @Override + public void addSource(@NotNull Runnable updateSource) { + fail(); + } + + @Override + public void removeSource(@NotNull Runnable updateSource) { + fail(); + } + + @Override + public boolean supportsRefreshing() { + return false; + } + + @Override + public void requestRefresh() { + fail(); + } +} diff --git a/engine/context/src/test/java/io/deephaven/engine/context/TestExecutionContext.java b/engine/context/src/test/java/io/deephaven/engine/context/TestExecutionContext.java deleted file mode 100644 index d59f024aea9..00000000000 --- a/engine/context/src/test/java/io/deephaven/engine/context/TestExecutionContext.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.deephaven.engine.context; - -import io.deephaven.auth.AuthContext; - -public class TestExecutionContext { - public static ExecutionContext createForUnitTests() { - return new ExecutionContext.Builder(new AuthContext.SuperUser()) - .markSystemic() - .newQueryScope() - .newQueryLibrary() - .setQueryCompiler(QueryCompiler.createForUnitTests()) - .build(); - } -} diff --git a/engine/context/src/test/java/io/deephaven/engine/context/TestQueryCompiler.java b/engine/context/src/test/java/io/deephaven/engine/context/TestQueryCompiler.java index b0d5decb660..3e2da905d96 100644 --- 
a/engine/context/src/test/java/io/deephaven/engine/context/TestQueryCompiler.java +++ b/engine/context/src/test/java/io/deephaven/engine/context/TestQueryCompiler.java @@ -4,10 +4,9 @@ package io.deephaven.engine.context; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.time.DateTimeUtils; -import io.deephaven.util.SafeCloseable; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.lang.reflect.Method; @@ -51,17 +50,8 @@ public class TestQueryCompiler { CLASS_CODE = testClassCode1.toString(); } - private SafeCloseable executionContext; - - @Before - public void setUp() { - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @After - public void tearDown() { - executionContext.close(); - } + @Rule + public final EngineCleanup framework = new EngineCleanup(); @Test public void testParallelCompile() throws Throwable { diff --git a/engine/rowset/build.gradle b/engine/rowset/build.gradle index 3bf01edb715..dd836e0e552 100644 --- a/engine/rowset/build.gradle +++ b/engine/rowset/build.gradle @@ -11,6 +11,7 @@ dependencies { api depTrove3 implementation project(':Container') + implementation project(':engine-context') implementation project(':engine-updategraph') implementation project(':Configuration') implementation depCommonsLang3 @@ -21,6 +22,7 @@ dependencies { Classpaths.inheritJUnitClassic(project, 'testImplementation') testImplementation project(':base-test-utils') + testImplementation project(':engine-test-utils') testImplementation project(':engine-rowset-test-utils') testRuntimeOnly project(':log-to-slf4j'), project(path: ':configs'), diff --git a/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImpl.java b/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImpl.java index 7c64ffee224..3a9ebba16a5 100644 --- 
a/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImpl.java +++ b/engine/rowset/src/main/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImpl.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.rowset.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.TrackingRowSet; import io.deephaven.engine.rowset.TrackingWritableRowSet; @@ -16,13 +17,13 @@ public class TrackingWritableRowSetImpl extends WritableRowSetImpl implements TrackingWritableRowSet { - private transient OrderedLongSet prevInnerSet; + private final LogicalClock clock; + private final WritableRowSetImpl prev; - private final WritableRowSetImpl prev = new UnmodifiableRowSetImpl(); + private transient OrderedLongSet prevInnerSet; /** - * Protects prevImpl. Only updated in checkPrev() and initializePreviousValue() (this later supposed to be used only - * right after the constructor, in special cases). + * Protects {@link #prevInnerSet}. Only updated in checkAndGetPrev() and initializePreviousValue(). 
*/ private transient volatile long changeTimeStep; @@ -34,16 +35,18 @@ public TrackingWritableRowSetImpl() { public TrackingWritableRowSetImpl(final OrderedLongSet innerSet) { super(innerSet); - this.prevInnerSet = OrderedLongSet.EMPTY; + clock = ExecutionContext.getContext().getUpdateGraph().clock(); + prev = new UnmodifiableRowSetImpl(); + prevInnerSet = OrderedLongSet.EMPTY; changeTimeStep = -1; } private OrderedLongSet checkAndGetPrev() { - if (LogicalClock.DEFAULT.currentStep() == changeTimeStep) { + if (clock.currentStep() == changeTimeStep) { return prevInnerSet; } synchronized (this) { - final long currentClockStep = LogicalClock.DEFAULT.currentStep(); + final long currentClockStep = clock.currentStep(); if (currentClockStep == changeTimeStep) { return prevInnerSet; } diff --git a/engine/rowset/src/test/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImplPrevTest.java b/engine/rowset/src/test/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImplPrevTest.java index ba7760cd4bf..5658ed312f3 100644 --- a/engine/rowset/src/test/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImplPrevTest.java +++ b/engine/rowset/src/test/java/io/deephaven/engine/rowset/impl/TrackingWritableRowSetImplPrevTest.java @@ -3,29 +3,21 @@ */ package io.deephaven.engine.rowset.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.TrackingRowSet; import io.deephaven.engine.rowset.TrackingWritableRowSet; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import org.junit.After; -import org.junit.Before; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.engine.updategraph.LogicalClockImpl; +import org.junit.Rule; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class TrackingWritableRowSetImplPrevTest { - 
@Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } + @Rule + final public EngineCleanup engineCleanup = new EngineCleanup(); @Test public void testPrevWithEmptyConstruction() { @@ -44,7 +36,8 @@ public void testPrevWithSingleRangeIxOnly() { ix.insert(2L); assertEquals(1L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + final LogicalClockImpl clock = (LogicalClockImpl) ExecutionContext.getContext().getUpdateGraph().clock(); + clock.startUpdateCycle(); assertEquals(2L, ix.sizePrev()); assertEquals(2L, ix.lastRowKeyPrev()); ix.insert(3L); @@ -55,7 +48,7 @@ public void testPrevWithSingleRangeIxOnly() { assertEquals(4L, ix.size()); assertEquals(2L, ix.sizePrev()); assertEquals(2L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.completeUpdateCycle(); + clock.completeUpdateCycle(); assertEquals(4L, ix.size()); assertEquals(2L, ix.sizePrev()); assertEquals(2L, ix.lastRowKeyPrev()); @@ -63,7 +56,7 @@ public void testPrevWithSingleRangeIxOnly() { assertEquals(5L, ix.size()); assertEquals(2L, ix.sizePrev()); assertEquals(2L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); assertEquals(5L, ix.size()); assertEquals(5L, ix.sizePrev()); assertEquals(5L, ix.lastRowKeyPrev()); @@ -85,7 +78,8 @@ public void testPrevWithRspOnly() { assertEquals(2L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); assertEquals(3L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + final LogicalClockImpl clock = (LogicalClockImpl) ExecutionContext.getContext().getUpdateGraph().clock(); + clock.startUpdateCycle(); assertEquals(3L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); assertEquals(5L, ix.lastRowKeyPrev()); @@ -99,7 +93,7 @@ public void 
testPrevWithRspOnly() { assertEquals(3L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); assertEquals(5L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.completeUpdateCycle(); + clock.completeUpdateCycle(); assertEquals(5L, ix.size()); assertEquals(3L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); @@ -109,7 +103,7 @@ public void testPrevWithRspOnly() { assertEquals(3L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); assertEquals(5L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); assertEquals(6L, ix.size()); assertEquals(6L, ix.sizePrev()); assertEquals(11L, ix.lastRowKeyPrev()); @@ -117,41 +111,42 @@ public void testPrevWithRspOnly() { @Test public void testPrevWithSingleThenRspThenEmptyThenSingle() { - LogicalClock.DEFAULT.resetForUnitTests(); + final LogicalClockImpl clock = (LogicalClockImpl) ExecutionContext.getContext().getUpdateGraph().clock(); + clock.resetForUnitTests(); final TrackingWritableRowSet ix = RowSetFactory.fromKeys(1L).toTracking(); assertEquals(1L, ix.size()); assertEquals(1L, ix.firstRowKey()); assertEquals(1L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); ix.insert(3L); assertEquals(2L, ix.size()); assertEquals(3L, ix.lastRowKey()); assertEquals(1L, ix.sizePrev()); assertEquals(1L, ix.firstRowKeyPrev()); - LogicalClock.DEFAULT.completeUpdateCycle(); + clock.completeUpdateCycle(); assertEquals(1L, ix.sizePrev()); assertEquals(1L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); assertEquals(2L, ix.sizePrev()); assertEquals(3L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.completeUpdateCycle(); + clock.completeUpdateCycle(); assertEquals(2L, ix.sizePrev()); assertEquals(3L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); ix.removeRange(0, 4); assertEquals(2L, ix.sizePrev()); assertEquals(3L, 
ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.completeUpdateCycle(); + clock.completeUpdateCycle(); assertEquals(2L, ix.sizePrev()); assertEquals(3L, ix.lastRowKeyPrev()); - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); assertTrue(ix.copyPrev().isEmpty()); ix.insert(1L); assertTrue(ix.copyPrev().isEmpty()); - LogicalClock.DEFAULT.completeUpdateCycle(); + clock.completeUpdateCycle(); assertTrue(ix.copyPrev().isEmpty()); - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); assertEquals(1L, ix.sizePrev()); assertEquals(1L, ix.lastRowKeyPrev()); } diff --git a/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortCyclesBenchmarkBase.java b/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortCyclesBenchmarkBase.java index 911f63c57f1..777718e45ed 100644 --- a/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortCyclesBenchmarkBase.java +++ b/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortCyclesBenchmarkBase.java @@ -1,8 +1,9 @@ package io.deephaven.engine.bench; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -30,7 +31,7 @@ public abstract class IncrementalSortCyclesBenchmarkBase { } private EngineCleanup engine; - private UpdateGraphProcessor ugp; + private ControlledUpdateGraph ug; private IncrementalReleaseFilter filter; private Table out; private BlackholeListener listener; @@ -41,8 +42,8 @@ public void init(long initialSize, long cycleSize, int numCycles, String indexTo throws Exception { engine = new EngineCleanup(); engine.setUp(); - ugp = UpdateGraphProcessor.DEFAULT; - 
ugp.startCycleForUnitTests(); + ug = ExecutionContext.getContext().getUpdateGraph().cast(); + ug.startCycleForUnitTests(); try { this.numCycles = numCycles; filter = new IncrementalReleaseFilter(initialSize, cycleSize); @@ -56,7 +57,7 @@ public void init(long initialSize, long cycleSize, int numCycles, String indexTo listener = new BlackholeListener(blackhole); out.addUpdateListener(listener); } finally { - ugp.completeCycleForUnitTests(); + ug.completeCycleForUnitTests(); } } @@ -67,18 +68,18 @@ public void teardown() throws Exception { listener = null; out.close(); out = null; - ugp = null; + ug = null; engine.tearDown(); engine = null; } public void runCycles() throws Throwable { for (int i = 0; i < numCycles; ++i) { - ugp.startCycleForUnitTests(); + ug.startCycleForUnitTests(); try { filter.run(); } finally { - ugp.completeCycleForUnitTests(); + ug.completeCycleForUnitTests(); } if (listener.e != null) { throw listener.e; diff --git a/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortRedirectionBase.java b/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortRedirectionBase.java index 93a7f71371e..f2e236e19b6 100644 --- a/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortRedirectionBase.java +++ b/engine/table/src/jmh/java/io/deephaven/engine/bench/IncrementalSortRedirectionBase.java @@ -1,8 +1,9 @@ package io.deephaven.engine.bench; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.openjdk.jmh.annotations.Benchmark; @@ -50,7 +51,7 @@ public abstract class IncrementalSortRedirectionBase { private static final int REMAINING_ROWS = 1000000; private EngineCleanup engine; - 
private UpdateGraphProcessor ugp; + private ControlledUpdateGraph ug; private IncrementalReleaseFilter filter; private Table ms; private int numCycles; @@ -60,7 +61,7 @@ public abstract class IncrementalSortRedirectionBase { public void setup(Blackhole blackhole) throws Exception { engine = new EngineCleanup(); engine.setUp(); - ugp = UpdateGraphProcessor.DEFAULT; + ug = ExecutionContext.getContext().getUpdateGraph().cast(); final int componentSize = 2000000; final int numBuckets = 2000; @@ -72,11 +73,11 @@ public void setup(Blackhole blackhole) throws Exception { numCycles = remainingRows / cycleIncrement; // create the initial table - ugp.startCycleForUnitTests(); + ug.startCycleForUnitTests(); ms = create(componentSize, numBuckets, numParts, initialSize, cycleIncrement); listener = new BlackholeListener(blackhole); ms.addUpdateListener(listener); - ugp.completeCycleForUnitTests(); + ug.completeCycleForUnitTests(); } @@ -99,7 +100,7 @@ public void teardown() throws Exception { listener = null; ms.close(); ms = null; - ugp = null; + ug = null; engine.tearDown(); engine = null; } @@ -108,11 +109,11 @@ public void teardown() throws Exception { @OperationsPerInvocation(REMAINING_ROWS) public void numRows() throws Throwable { for (int i = 0; i < numCycles; ++i) { - ugp.startCycleForUnitTests(); + ug.startCycleForUnitTests(); try { filter.run(); } finally { - ugp.completeCycleForUnitTests(); + ug.completeCycleForUnitTests(); } if (listener.e != null) { throw listener.e; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java index 43008b16fa9..ccbfba6580b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/AbstractColumnSource.java @@ -8,6 +8,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.Chunk; import 
io.deephaven.chunk.WritableChunk; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ColumnSource; @@ -15,6 +16,7 @@ import io.deephaven.engine.table.impl.chunkfilter.ChunkFilter; import io.deephaven.engine.table.impl.chunkfilter.ChunkMatchFilterFactory; import io.deephaven.engine.table.impl.sources.UnboxedLongBackedColumnSource; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.vector.*; import io.deephaven.hash.KeyedObjectHashSet; import io.deephaven.hash.KeyedObjectKey; @@ -43,6 +45,8 @@ public abstract class AbstractColumnSource implements protected final Class type; protected final Class componentType; + protected final UpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph(); + protected volatile Map groupToRange; protected volatile List> rowSetIndexerKey; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/BaseTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/BaseTable.java index 2959b0ec356..c2a3d87bdb8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/BaseTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/BaseTable.java @@ -10,7 +10,13 @@ import io.deephaven.base.reference.SimpleReference; import io.deephaven.base.reference.WeakSimpleReference; import io.deephaven.base.verify.Assert; +import io.deephaven.base.verify.Require; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.PoisonedUpdateGraph; +import io.deephaven.engine.exceptions.UpdateGraphConflictException; +import io.deephaven.engine.updategraph.NotificationQueue; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.exceptions.NotSortableException; import io.deephaven.engine.liveness.LivenessReferent; import io.deephaven.engine.rowset.RowSet; @@ 
-23,6 +29,7 @@ import io.deephaven.engine.table.impl.select.SwitchColumn; import io.deephaven.engine.table.impl.util.FieldUtils; import io.deephaven.engine.updategraph.*; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.hash.KeyedObjectHashSet; import io.deephaven.internal.log.LoggerFactory; @@ -63,7 +70,7 @@ public abstract class BaseTable> extends private static final Logger log = LoggerFactory.getLogger(BaseTable.class); private static final AtomicReferenceFieldUpdater CONDITION_UPDATER = - AtomicReferenceFieldUpdater.newUpdater(BaseTable.class, Condition.class, "updateGraphProcessorCondition"); + AtomicReferenceFieldUpdater.newUpdater(BaseTable.class, Condition.class, "updateGraphCondition"); private static final AtomicReferenceFieldUpdater PARENTS_UPDATER = AtomicReferenceFieldUpdater.newUpdater(BaseTable.class, Collection.class, "parents"); @@ -87,10 +94,15 @@ public abstract class BaseTable> extends */ protected final String description; + /** + * This table's update graph. 
+ */ + protected final UpdateGraph updateGraph; + // Fields for DynamicNode implementation and update propagation support private volatile boolean refreshing; @SuppressWarnings({"FieldMayBeFinal", "unused"}) // Set via ensureField with CONDITION_UPDATER - private volatile Condition updateGraphProcessorCondition; + private volatile Condition updateGraphCondition; @SuppressWarnings("FieldMayBeFinal") // Set via ensureField with PARENTS_UPDATER private volatile Collection parents = EMPTY_PARENTS; @SuppressWarnings("FieldMayBeFinal") // Set via ensureField with CHILD_LISTENER_REFERENCES_UPDATER @@ -112,7 +124,8 @@ public BaseTable( super(attributes); this.definition = definition; this.description = description; - lastNotificationStep = LogicalClock.DEFAULT.currentStep(); + updateGraph = Require.neqNull(ExecutionContext.getContext().getUpdateGraph(), "UpdateGraph"); + lastNotificationStep = updateGraph.clock().currentStep(); // Properly flag this table as systemic or not. Note that we use the initial attributes map, rather than // getAttribute, in order to avoid triggering the "immutable after first access" restrictions of @@ -137,6 +150,11 @@ public String getDescription() { return description; } + @Override + public UpdateGraph getUpdateGraph() { + return updateGraph; + } + @Override public String toString() { return description; @@ -418,6 +436,10 @@ public Table removeBlink() { @Override public final void addParentReference(@NotNull final Object parent) { + if (parent instanceof NotificationQueue.Dependency) { + // Ensure that we are in the same update graph + getUpdateGraph((NotificationQueue.Dependency) parent); + } if (DynamicNode.notDynamicOrIsRefreshing(parent)) { setRefreshing(true); ensureParents().add(parent); @@ -443,8 +465,9 @@ public boolean satisfied(final long step) { // If we have no parents whatsoever then we are a source, and have no dependency chain other than the UGP // itself if (localParents.isEmpty()) { - if 
(UpdateGraphProcessor.DEFAULT.satisfied(step)) { - UpdateGraphProcessor.DEFAULT.logDependencies().append("Root node satisfied ").append(this).endl(); + if (updateGraph.satisfied(step)) { + updateGraph.logDependencies().append("Root node satisfied ").append(this) + .endl(); return true; } return false; @@ -455,7 +478,7 @@ public boolean satisfied(final long step) { for (Object parent : localParents) { if (parent instanceof NotificationQueue.Dependency) { if (!((NotificationQueue.Dependency) parent).satisfied(step)) { - UpdateGraphProcessor.DEFAULT.logDependencies() + updateGraph.logDependencies() .append("Parents dependencies not satisfied for ").append(this) .append(", parent=").append((NotificationQueue.Dependency) parent) .endl(); @@ -465,7 +488,7 @@ public boolean satisfied(final long step) { } } - UpdateGraphProcessor.DEFAULT.logDependencies() + updateGraph.logDependencies() .append("All parents dependencies satisfied for ").append(this) .endl(); @@ -476,27 +499,27 @@ public boolean satisfied(final long step) { @Override public void awaitUpdate() throws InterruptedException { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(ensureCondition()::await); + updateGraph.exclusiveLock().doLocked(ensureCondition()::await); } @Override public boolean awaitUpdate(long timeout) throws InterruptedException { final MutableBoolean result = new MutableBoolean(false); - UpdateGraphProcessor.DEFAULT.exclusiveLock() - .doLocked(() -> result.setValue(ensureCondition().await(timeout, TimeUnit.MILLISECONDS))); + updateGraph.exclusiveLock().doLocked( + () -> result.setValue(ensureCondition().await(timeout, TimeUnit.MILLISECONDS))); return result.booleanValue(); } private Condition ensureCondition() { return FieldUtils.ensureField(this, CONDITION_UPDATER, null, - () -> UpdateGraphProcessor.DEFAULT.exclusiveLock().newCondition()); + () -> updateGraph.exclusiveLock().newCondition()); } private void maybeSignal() { - final Condition localCondition = updateGraphProcessorCondition; 
+ final Condition localCondition = updateGraphCondition; if (localCondition != null) { - UpdateGraphProcessor.DEFAULT.requestSignal(localCondition); + updateGraph.requestSignal(localCondition); } } @@ -505,7 +528,7 @@ public void addUpdateListener(final ShiftObliviousListener listener, final boole addUpdateListener(new LegacyListenerAdapter(listener, getRowSet())); if (replayInitialImage) { if (isRefreshing()) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + updateGraph.checkInitiateSerialTableOperation(); } if (getRowSet().isNonempty()) { listener.setInitialImage(getRowSet()); @@ -520,6 +543,10 @@ public void addUpdateListener(final TableUpdateListener listener) { throw new IllegalStateException("Can not listen to failed table " + description); } if (isRefreshing()) { + // ensure that listener is in the same update graph if applicable + if (listener instanceof NotificationQueue.Dependency) { + getUpdateGraph((NotificationQueue.Dependency) listener); + } ensureChildListenerReferences().add(listener); } } @@ -557,6 +584,10 @@ public final boolean isRefreshing() { @Override public final boolean setRefreshing(boolean refreshing) { + if (refreshing && !updateGraph.supportsRefreshing()) { + throw new UpdateGraphConflictException("Attempt to setRefreshing(true) but Table was constructed with a " + + "static-only UpdateGraph."); + } return this.refreshing = refreshing; } @@ -594,9 +625,9 @@ public final void notifyListeners(RowSet added, RowSet removed, RowSet modified) */ public final void notifyListeners(final TableUpdate update) { Assert.eqFalse(isFailed, "isFailed"); - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); // tables may only be updated once per cycle - Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, "LogicalClock.DEFAULT.currentStep()"); + Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, "updateGraph.clock().currentStep()"); 
Assert.eqTrue(update.valid(), "update.valid()"); if (update.empty()) { @@ -757,8 +788,9 @@ private void validateUpdateOverlaps(final TableUpdate update) { */ public final void notifyListenersOnError(final Throwable e, @Nullable final TableListener.Entry sourceEntry) { Assert.eqFalse(isFailed, "isFailed"); - final long currentStep = LogicalClock.DEFAULT.currentStep(); - Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, "LogicalClock.DEFAULT.currentStep()"); + final long currentStep = updateGraph.clock().currentStep(); + Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, + "updateGraph.clock().currentStep()"); isFailed = true; maybeSignal(); @@ -772,13 +804,13 @@ public final void notifyListenersOnError(final Throwable e, @Nullable final Tabl /** * Get the notification queue to insert notifications into as they are generated by listeners during * {@link #notifyListeners} and {@link #notifyListenersOnError(Throwable, TableListener.Entry)}. This method may be - * overridden to provide a different notification queue than the {@link UpdateGraphProcessor#DEFAULT} instance for + * overridden to provide a different notification queue than the table's {@link PeriodicUpdateGraph} instance for * more complex behavior. 
* * @return The {@link NotificationQueue} to add to */ protected NotificationQueue getNotificationQueue() { - return UpdateGraphProcessor.DEFAULT; + return updateGraph; } @Override @@ -813,9 +845,9 @@ public static class ShiftObliviousListenerImpl extends ShiftObliviousInstrumente @ReferentialIntegrity private final Table parent; - private final BaseTable dependent; + private final BaseTable dependent; - public ShiftObliviousListenerImpl(String description, Table parent, BaseTable dependent) { + public ShiftObliviousListenerImpl(String description, Table parent, BaseTable dependent) { super(description); this.parent = parent; this.dependent = dependent; @@ -1212,7 +1244,7 @@ public Table setTotalsTable(String directive) { public static void initializeWithSnapshot( String logPrefix, SwapListener swapListener, ConstructSnapshot.SnapshotFunction snapshotFunction) { if (swapListener == null) { - snapshotFunction.call(false, LogicalClock.DEFAULT.currentValue()); + snapshotFunction.call(false, ExecutionContext.getContext().getUpdateGraph().clock().currentValue()); return; } ConstructSnapshot.callDataSnapshotFunction(logPrefix, swapListener.makeSnapshotControl(), snapshotFunction); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/BlinkTableTools.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/BlinkTableTools.java index 530a96b0b7b..190360cf3c3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/BlinkTableTools.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/BlinkTableTools.java @@ -4,6 +4,7 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.RowSetShiftData; @@ -14,6 +15,8 @@ import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import 
io.deephaven.engine.table.impl.sources.ReinterpretUtils; import io.deephaven.engine.table.impl.util.*; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.util.SafeCloseable; import org.apache.commons.lang3.mutable.Mutable; import org.apache.commons.lang3.mutable.MutableObject; @@ -36,6 +39,13 @@ public class BlinkTableTools { * @return An append-only in-memory table representing all data encountered in the blink table across all cycles */ public static Table blinkToAppendOnly(final Table blinkTable) { + final UpdateGraph updateGraph = blinkTable.getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return internalBlinkToAppendOnly(blinkTable); + } + } + + private static Table internalBlinkToAppendOnly(final Table blinkTable) { return QueryPerformanceRecorder.withNugget("blinkToAppendOnly", () -> { if (!isBlink(blinkTable)) { throw new IllegalArgumentException("Input is not a blink table!"); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/ConstituentDependency.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/ConstituentDependency.java index 1e4cf9e256d..941b625621e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/ConstituentDependency.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/ConstituentDependency.java @@ -14,7 +14,7 @@ import io.deephaven.engine.table.SharedContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.updategraph.NotificationQueue.Dependency; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.util.SafeCloseable; import io.deephaven.util.SafeCloseableArray; import org.jetbrains.annotations.NotNull; @@ -71,6 +71,11 @@ private ConstituentDependency( this.dependencyColumns = dependencyColumns; } + @Override + public UpdateGraph getUpdateGraph() { + return 
resultUpdatedDependency.getUpdateGraph(); + } + @Override public LogOutput append(@NotNull final LogOutput logOutput) { return logOutput.append("ConstituentDependency-").append(System.identityHashCode(this)); @@ -82,7 +87,7 @@ public boolean satisfied(final long step) { return true; } if (!resultUpdatedDependency.satisfied(step)) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Result updated dependency not satisfied for ").append(this) .append(", dependency=").append(resultUpdatedDependency) .endl(); @@ -118,7 +123,7 @@ public boolean satisfied(final long step) { for (int di = 0; di < numConstituents; ++di) { final Dependency constituent = dependencies.get(di); if (constituent != null && !constituent.satisfied(step)) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Constituent dependencies not satisfied for ") .append(this).append(", constituent=").append(constituent) .endl(); @@ -135,7 +140,7 @@ public boolean satisfied(final long step) { } Assert.eq(firstUnsatisfiedRowPosition, "firstUnsatisfiedRowPosition", resultRows.size(), "resultRows.size()"); - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("All constituent dependencies satisfied for ").append(this) .endl(); lastSatisfiedStep = step; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java index fe54d7da7a6..e4ec0d123bd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/CrossJoinShiftState.java @@ -4,12 +4,14 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.updategraph.LogicalClock; /** * Shift state used by the {@link 
io.deephaven.engine.table.impl.sources.BitShiftingColumnSource}. */ public class CrossJoinShiftState { + private final LogicalClock clock; private final boolean leftOuterJoin; private int numShiftBits; private int prevNumShiftBits; @@ -19,6 +21,7 @@ public class CrossJoinShiftState { public CrossJoinShiftState(final int numInitialShiftBits, final boolean leftOuterJoin) { setNumShiftBits(numInitialShiftBits); + this.clock = ExecutionContext.getContext().getUpdateGraph().clock(); this.leftOuterJoin = leftOuterJoin; } @@ -33,7 +36,7 @@ void setNumShiftBitsAndUpdatePrev(final int newNumShiftBits) { Assert.lt(newNumShiftBits, "newNumShiftBits", 63, "63"); Assert.gt(newNumShiftBits, "newNumShiftBits", 0, "0"); - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = clock.currentStep(); if (updatedClockTick != currentStep) { prevMask = mask; prevNumShiftBits = numShiftBits; @@ -49,7 +52,7 @@ public int getNumShiftBits() { public int getPrevNumShiftBits() { if (updatedClockTick > 0) { - if (updatedClockTick == LogicalClock.DEFAULT.currentStep()) { + if (updatedClockTick == clock.currentStep()) { return prevNumShiftBits; } updatedClockTick = 0; @@ -83,7 +86,7 @@ private long getMask() { private long getPrevMask() { if (updatedClockTick > 0) { - if (updatedClockTick == LogicalClock.DEFAULT.currentStep()) { + if (updatedClockTick == clock.currentStep()) { return prevMask; } updatedClockTick = 0; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/DataAccessHelpers.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/DataAccessHelpers.java index 0442a4f0f7c..5998301db40 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/DataAccessHelpers.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/DataAccessHelpers.java @@ -1,7 +1,9 @@ package io.deephaven.engine.table.impl; +import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.DataColumn; import 
io.deephaven.engine.table.Table; +import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; import java.util.Arrays; @@ -36,12 +38,14 @@ public static DataColumn getColumn(Table table, @NotNull final String col // ----------------------------------------------------------------------------------------------------------------- public static Object[] getRecord(Table table, long rowNo, String... columnNames) { - final Table t = table.coalesce(); - final long key = t.getRowSet().get(rowNo); - return (columnNames.length > 0 - ? Arrays.stream(columnNames).map(t::getColumnSource) - : t.getColumnSources().stream()) - .map(columnSource -> columnSource.get(key)) - .toArray(Object[]::new); + try (final SafeCloseable ignored = LivenessScopeStack.open()) { + final Table t = table.coalesce(); + final long key = t.getRowSet().get(rowNo); + return (columnNames.length > 0 + ? Arrays.stream(columnNames).map(t::getColumnSource) + : t.getColumnSources().stream()) + .map(columnSource -> columnSource.get(key)) + .toArray(Object[]::new); + } } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/InstrumentedTableListenerBase.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/InstrumentedTableListenerBase.java index cff081ecf9c..db00935fab5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/InstrumentedTableListenerBase.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/InstrumentedTableListenerBase.java @@ -7,20 +7,19 @@ import io.deephaven.base.log.LogOutputAppendable; import io.deephaven.base.verify.Assert; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.engine.exceptions.UncheckedTableException; import io.deephaven.engine.table.TableListener; import io.deephaven.engine.table.TableUpdate; import io.deephaven.engine.table.impl.perf.PerformanceEntry; +import 
io.deephaven.engine.updategraph.*; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.AbstractNotification; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.io.log.LogEntry; import io.deephaven.io.log.impl.LogOutputStringImpl; import io.deephaven.io.logger.Logger; -import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.engine.liveness.LivenessArtifact; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.table.impl.util.AsyncClientErrorNotifier; import io.deephaven.engine.table.impl.util.AsyncErrorLogger; import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; @@ -42,6 +41,7 @@ public abstract class InstrumentedTableListenerBase extends LivenessArtifact private static final Logger log = LoggerFactory.getLogger(InstrumentedTableListenerBase.class); + private final UpdateGraph updateGraph; private final PerformanceEntry entry; private final boolean terminalListener; @@ -54,10 +54,16 @@ public abstract class InstrumentedTableListenerBase extends LivenessArtifact private volatile long lastEnqueuedStep = NotificationStepReceiver.NULL_NOTIFICATION_STEP; InstrumentedTableListenerBase(@Nullable String description, boolean terminalListener) { + this.updateGraph = ExecutionContext.getContext().getUpdateGraph(); this.entry = UpdatePerformanceTracker.getInstance().getEntry(description); this.terminalListener = terminalListener; } + @Override + public UpdateGraph getUpdateGraph() { + return updateGraph; + } + @Override public String toString() { return Utils.getSimpleNameFor(this) + '-' + entry.getDescription(); @@ -86,14 +92,14 @@ public LogOutput append(@NotNull final LogOutput logOutput) { } public boolean canExecute(final long step) { - return UpdateGraphProcessor.DEFAULT.satisfied(step); + return getUpdateGraph().satisfied(step); } @Override public boolean satisfied(final long 
step) { // Check and see if we've already been completed. if (lastCompletedStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Already completed notification for ").append(this).append(", step=").append(step).endl(); return true; } @@ -101,14 +107,14 @@ public boolean satisfied(final long step) { // This notification could be enqueued during the course of canExecute, but checking if we're enqueued is a very // cheap check that may let us avoid recursively checking all the dependencies. if (lastEnqueuedStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Enqueued notification for ").append(this).append(", step=").append(step).endl(); return false; } // Recursively check to see if our dependencies have been satisfied. if (!canExecute(step)) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Dependencies not yet satisfied for ").append(this).append(", step=").append(step).endl(); return false; } @@ -116,7 +122,7 @@ public boolean satisfied(final long step) { // Let's check again and see if we got lucky and another thread completed us while we were checking our // dependencies. if (lastCompletedStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Already completed notification during dependency check for ").append(this) .append(", step=").append(step) .endl(); @@ -126,14 +132,14 @@ public boolean satisfied(final long step) { // We check the queued notification step again after the dependency check. It is possible that something // enqueued us while we were evaluating the dependencies, and we must not miss that race. 
if (lastEnqueuedStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Enqueued notification during dependency check for ").append(this) .append(", step=").append(step) .endl(); return false; } - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Dependencies satisfied for ").append(this) .append(", lastCompleted=").append(lastCompletedStep) .append(", lastQueued=").append(lastEnqueuedStep) @@ -221,7 +227,7 @@ protected abstract class NotificationBase extends AbstractNotification implement super(terminalListener); this.update = update.acquire(); - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = getUpdateGraph().clock().currentStep(); if (lastCompletedStep == currentStep) { // noinspection ThrowableNotThrown Assert.statementNeverExecuted("Enqueued after lastCompletedStep already set to current step: " + this @@ -243,8 +249,10 @@ public final String toString() { @Override public final LogOutput append(LogOutput logOutput) { - return logOutput.append("Notification:(step=") - .append(LogicalClock.DEFAULT.currentStep()) + return logOutput.append("Notification:(updateGraph=") + .append(getUpdateGraph()) + .append(", step=") + .append(getUpdateGraph().clock().currentStep()) .append(", listener=") .append(System.identityHashCode(InstrumentedTableListenerBase.this)) .append(")") @@ -271,7 +279,7 @@ private void doRunInternal(final Runnable invokeOnUpdate) { entry.onUpdateStart(update.added(), update.removed(), update.modified(), update.shifted()); - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = getUpdateGraph().clock().currentStep(); try { Assert.eq(lastEnqueuedStep, "lastEnqueuedStep", currentStep, "currentStep"); if (lastCompletedStep >= currentStep) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/ListenerRecorder.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/ListenerRecorder.java index 5fd6981463b..6386ad7d5d4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/ListenerRecorder.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/ListenerRecorder.java @@ -4,6 +4,7 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessManager; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -11,7 +12,6 @@ import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableUpdate; -import io.deephaven.engine.updategraph.LogicalClock; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -61,7 +61,7 @@ public void release() { @Override public void onUpdate(final TableUpdate upstream) { this.update = upstream.acquire(); - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = getUpdateGraph().clock().currentStep(); Assert.lt(this.notificationStep, "this.notificationStep", currentStep, "currentStep"); this.notificationStep = currentStep; @@ -75,7 +75,7 @@ public void onUpdate(final TableUpdate upstream) { @Override protected void onFailureInternal(@NotNull final Throwable originalException, @Nullable final Entry sourceEntry) { - this.notificationStep = LogicalClock.DEFAULT.currentStep(); + this.notificationStep = getUpdateGraph().clock().currentStep(); if (mergedListener == null) { throw new IllegalStateException("Merged listener not set"); } @@ -94,7 +94,7 @@ protected void destroy() { } public boolean recordedVariablesAreValid() { - return notificationStep == LogicalClock.DEFAULT.currentStep(); + return notificationStep == getUpdateGraph().clock().currentStep(); } public void setMergedListener(MergedListener mergedListener) { diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/MergedListener.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/MergedListener.java index 10fc3b8fafa..2a70f735825 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/MergedListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/MergedListener.java @@ -5,20 +5,20 @@ import io.deephaven.base.log.LogOutput; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.exceptions.UncheckedTableException; import io.deephaven.engine.liveness.LivenessArtifact; import io.deephaven.engine.table.TableListener; import io.deephaven.engine.table.impl.perf.BasePerformanceEntry; import io.deephaven.engine.table.impl.perf.PerformanceEntry; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; +import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; +import io.deephaven.engine.table.impl.util.AsyncClientErrorNotifier; +import io.deephaven.engine.updategraph.AbstractNotification; import io.deephaven.engine.updategraph.NotificationQueue; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.AbstractNotification; -import io.deephaven.engine.table.impl.util.AsyncClientErrorNotifier; -import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; +import io.deephaven.internal.log.LoggerFactory; +import io.deephaven.io.logger.Logger; import io.deephaven.util.annotations.ReferentialIntegrity; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -42,6 +42,7 @@ public abstract class MergedListener extends LivenessArtifact implements NotificationQueue.Dependency { private static final Logger log = 
LoggerFactory.getLogger(MergedListener.class); + private final UpdateGraph updateGraph; private final Iterable recorders; private final Iterable dependencies; private final String listenerDescription; @@ -63,6 +64,7 @@ protected MergedListener( Iterable dependencies, String listenerDescription, QueryTable result) { + this.updateGraph = ExecutionContext.getContext().getUpdateGraph(); this.recorders = recorders; recorders.forEach(this::manage); this.dependencies = dependencies; @@ -76,6 +78,11 @@ private void releaseFromRecorders() { recorders.forEach(ListenerRecorder::release); } + @Override + public UpdateGraph getUpdateGraph() { + return updateGraph; + } + public final void notifyOnUpstreamError( @NotNull final Throwable upstreamError, @Nullable final TableListener.Entry errorSourceEntry) { notifyInternal(upstreamError, errorSourceEntry); @@ -87,7 +94,7 @@ public void notifyChanges() { private void notifyInternal(@Nullable final Throwable upstreamError, @Nullable final TableListener.Entry errorSourceEntry) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = getUpdateGraph().clock().currentStep(); synchronized (this) { if (notificationStep == currentStep) { @@ -116,7 +123,7 @@ private void notifyInternal(@Nullable final Throwable upstreamError, queuedNotificationStep = currentStep; } - UpdateGraphProcessor.DEFAULT.addNotification(new MergedNotification()); + getUpdateGraph().addNotification(new MergedNotification()); } private void propagateError( @@ -139,7 +146,8 @@ protected boolean systemicResult() { protected void propagateErrorDownstream( final boolean fromProcess, @NotNull final Throwable error, @Nullable final TableListener.Entry entry) { - if (fromProcess && result.getLastNotificationStep() == LogicalClock.DEFAULT.currentStep()) { + if (fromProcess && result.getLastNotificationStep() == getUpdateGraph().clock() + .currentStep()) { // If the result managed to send its notification, we should not send our own on this 
cycle. if (!result.isFailed()) { // If the result isn't failed, we need to mark it as such on the next cycle. @@ -162,6 +170,7 @@ private static final class DelayedErrorNotifier implements Runnable { private final Throwable error; private final TableListener.Entry entry; private final Collection> targetReferences; + private final UpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph(); private DelayedErrorNotifier( @NotNull final Throwable error, @@ -170,7 +179,7 @@ private DelayedErrorNotifier( this.error = error; this.entry = entry; this.targetReferences = targets.stream().map(WeakReference::new).collect(Collectors.toList()); - UpdateGraphProcessor.DEFAULT.addSource(this); + updateGraph.addSource(this); } @Override @@ -179,7 +188,7 @@ public void run() { .map(WeakReference::get) .filter(Objects::nonNull) .forEach(t -> t.notifyListenersOnError(error, entry)); - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } } @@ -201,7 +210,7 @@ protected boolean canExecute(final long step) { public boolean satisfied(final long step) { // Check and see if we've already been completed. if (lastCompletedStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Already completed notification for ").append(this).append(", step=").append(step).endl(); return true; } @@ -209,14 +218,14 @@ public boolean satisfied(final long step) { // This notification could be enqueued during the course of canExecute, but checking if we're enqueued is a very // cheap check that may let us avoid recursively checking all the dependencies. if (queuedNotificationStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Enqueued notification for ").append(this).append(", step=").append(step).endl(); return false; } // Recursively check to see if our dependencies have been satisfied. 
if (!canExecute(step)) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Dependencies not yet satisfied for ").append(this).append(", step=").append(step).endl(); return false; } @@ -224,7 +233,7 @@ public boolean satisfied(final long step) { // Let's check again and see if we got lucky and another thread completed us while we were checking our // dependencies. if (lastCompletedStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Already completed notification during dependency check for ").append(this) .append(", step=").append(step) .endl(); @@ -234,14 +243,14 @@ public boolean satisfied(final long step) { // We check the queued notification step again after the dependency check. It is possible that something // enqueued us while we were evaluating the dependencies, and we must not miss that race. if (queuedNotificationStep == step) { - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Enqueued notification during dependency check for ").append(this) .append(", step=").append(step) .endl(); return false; } - UpdateGraphProcessor.DEFAULT.logDependencies() + getUpdateGraph().logDependencies() .append("Dependencies satisfied for ").append(this) .append(", lastCompleted=").append(lastCompletedStep) .append(", lastQueued=").append(queuedNotificationStep) @@ -272,7 +281,7 @@ public MergedNotification() { @Override public void run() { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = getUpdateGraph().clock().currentStep(); try { if (queuedNotificationStep != currentStep) { // noinspection ConstantConditions @@ -312,7 +321,8 @@ public void run() { notificationStep = queuedNotificationStep; } process(); - UpdateGraphProcessor.DEFAULT.logDependencies().append("MergedListener has completed execution ") + getUpdateGraph().logDependencies() + .append("MergedListener has completed execution 
") .append(this).endl(); } finally { entry.onUpdateEnd(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java index e5783ea4920..b87e877bba9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/QueryTable.java @@ -28,6 +28,7 @@ import io.deephaven.configuration.Configuration; import io.deephaven.datastructures.util.CollectionUtil; import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.exceptions.CancellationException; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.primitive.iterator.*; @@ -59,7 +60,6 @@ import io.deephaven.qst.table.AggregateAllTable; import io.deephaven.util.annotations.InternalUseOnly; import io.deephaven.vector.Vector; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.engine.table.impl.perf.QueryPerformanceRecorder; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; @@ -195,9 +195,9 @@ public interface MemoizableOperation * Values less than zero disable overhead checking, and result in never flattening the input. - * + *

* A value of zero results in always flattening the input. */ private static final double MAXIMUM_STATIC_SELECT_MEMORY_OVERHEAD = @@ -425,7 +425,7 @@ public CloseableIterator objectColumnIterator(@NotNull fi /** * Producers of tables should use the modified column set embedded within the table for their result. - * + *

* You must not mutate the result of this method if you are not generating the updates for this table. Callers * should not rely on the dirty state of this modified column set. * @@ -527,100 +527,127 @@ public ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer(fin @Override public PartitionedTable partitionBy(final boolean dropKeys, final String... keyColumnNames) { - if (isBlink()) { - throw unsupportedForBlinkTables("partitionBy"); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (isBlink()) { + throw unsupportedForBlinkTables("partitionBy"); + } + final List columns = ColumnName.from(keyColumnNames); + return memoizeResult(MemoizedOperationKey.partitionBy(dropKeys, columns), () -> { + final Table partitioned = aggBy(Partition.of(CONSTITUENT, !dropKeys), columns); + final Set keyColumnNamesSet = + Arrays.stream(keyColumnNames).collect(Collectors.toCollection(LinkedHashSet::new)); + final TableDefinition constituentDefinition; + if (dropKeys) { + constituentDefinition = TableDefinition.of(definition.getColumnStream() + .filter(cd -> !keyColumnNamesSet.contains(cd.getName())).toArray(ColumnDefinition[]::new)); + } else { + constituentDefinition = definition; + } + return new PartitionedTableImpl(partitioned, keyColumnNamesSet, true, CONSTITUENT.name(), + constituentDefinition, isRefreshing(), false); + }); } - final List columns = ColumnName.from(keyColumnNames); - return memoizeResult(MemoizedOperationKey.partitionBy(dropKeys, columns), () -> { - final Table partitioned = aggBy(Partition.of(CONSTITUENT, !dropKeys), columns); + } + + @Override + public PartitionedTable partitionedAggBy(final Collection aggregations, + final boolean preserveEmpty, @Nullable final Table initialGroups, final String... 
keyColumnNames) { + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (isBlink()) { + throw unsupportedForBlinkTables("partitionedAggBy"); + } + final Optional includedPartition = aggregations.stream() + .filter(agg -> agg instanceof Partition) + .map(agg -> (Partition) agg) + .findFirst(); + final Partition partition = includedPartition.orElseGet(() -> Partition.of(CONSTITUENT)); + final Collection aggregationsToUse = includedPartition.isPresent() + ? aggregations + : Stream.concat(aggregations.stream(), Stream.of(partition)).collect(Collectors.toList()); + final Table aggregated = + aggBy(aggregationsToUse, preserveEmpty, initialGroups, ColumnName.from(keyColumnNames)); final Set keyColumnNamesSet = Arrays.stream(keyColumnNames).collect(Collectors.toCollection(LinkedHashSet::new)); final TableDefinition constituentDefinition; - if (dropKeys) { + if (partition.includeGroupByColumns()) { + constituentDefinition = definition; + } else { constituentDefinition = TableDefinition.of(definition.getColumnStream() .filter(cd -> !keyColumnNamesSet.contains(cd.getName())).toArray(ColumnDefinition[]::new)); - } else { - constituentDefinition = definition; } - return new PartitionedTableImpl(partitioned, keyColumnNamesSet, true, CONSTITUENT.name(), + return new PartitionedTableImpl(aggregated, keyColumnNamesSet, true, partition.column().name(), constituentDefinition, isRefreshing(), false); - }); - } - - @Override - public PartitionedTable partitionedAggBy(final Collection aggregations, - final boolean preserveEmpty, @Nullable final Table initialGroups, final String... 
keyColumnNames) { - if (isBlink()) { - throw unsupportedForBlinkTables("partitionedAggBy"); - } - final Optional includedPartition = aggregations.stream() - .filter(agg -> agg instanceof Partition) - .map(agg -> (Partition) agg) - .findFirst(); - final Partition partition = includedPartition.orElseGet(() -> Partition.of(CONSTITUENT)); - final Collection aggregationsToUse = includedPartition.isPresent() - ? aggregations - : Stream.concat(aggregations.stream(), Stream.of(partition)).collect(Collectors.toList()); - final Table aggregated = - aggBy(aggregationsToUse, preserveEmpty, initialGroups, ColumnName.from(keyColumnNames)); - final Set keyColumnNamesSet = - Arrays.stream(keyColumnNames).collect(Collectors.toCollection(LinkedHashSet::new)); - final TableDefinition constituentDefinition; - if (partition.includeGroupByColumns()) { - constituentDefinition = definition; - } else { - constituentDefinition = TableDefinition.of(definition.getColumnStream() - .filter(cd -> !keyColumnNamesSet.contains(cd.getName())).toArray(ColumnDefinition[]::new)); } - return new PartitionedTableImpl(aggregated, keyColumnNamesSet, true, partition.column().name(), - constituentDefinition, isRefreshing(), false); } @Override public RollupTable rollup(final Collection aggregations, final boolean includeConstituents, final Collection groupByColumns) { - if (isBlink() && includeConstituents) { - throw unsupportedForBlinkTables("rollup with included constituents"); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (isBlink() && includeConstituents) { + throw unsupportedForBlinkTables("rollup with included constituents"); + } + return memoizeResult(MemoizedOperationKey.rollup(aggregations, groupByColumns, includeConstituents), + () -> RollupTableImpl.makeRollup(this, aggregations, includeConstituents, groupByColumns)); } - return 
memoizeResult(MemoizedOperationKey.rollup(aggregations, groupByColumns, includeConstituents), - () -> RollupTableImpl.makeRollup(this, aggregations, includeConstituents, groupByColumns)); } @Override public TreeTable tree(String idColumn, String parentColumn) { - if (isBlink()) { - throw unsupportedForBlinkTables("tree"); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (isBlink()) { + throw unsupportedForBlinkTables("tree"); + } + return memoizeResult(MemoizedOperationKey.tree(idColumn, parentColumn), + () -> TreeTableImpl.makeTree(this, ColumnName.of(idColumn), ColumnName.of(parentColumn))); } - return memoizeResult(MemoizedOperationKey.tree(idColumn, parentColumn), - () -> TreeTableImpl.makeTree(this, ColumnName.of(idColumn), ColumnName.of(parentColumn))); } @Override public Table slice(final long firstPositionInclusive, final long lastPositionExclusive) { - if (firstPositionInclusive == lastPositionExclusive) { - return getSubTable(RowSetFactory.empty().toTracking()); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (firstPositionInclusive == lastPositionExclusive) { + return getSubTable(RowSetFactory.empty().toTracking()); + } + return getResult(SliceLikeOperation.slice(this, firstPositionInclusive, lastPositionExclusive, "slice")); } - return getResult(SliceLikeOperation.slice(this, firstPositionInclusive, lastPositionExclusive, "slice")); } @Override public Table head(final long size) { - return slice(0, Require.geqZero(size, "size")); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return slice(0, Require.geqZero(size, "size")); + } } @Override public Table tail(final long size) { - return slice(-Require.geqZero(size, 
"size"), 0); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return slice(-Require.geqZero(size, "size"), 0); + } } @Override public Table headPct(final double percent) { - return getResult(SliceLikeOperation.headPct(this, percent)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return getResult(SliceLikeOperation.headPct(this, percent)); + } } @Override public Table tailPct(final double percent) { - return getResult(SliceLikeOperation.tailPct(this, percent)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return getResult(SliceLikeOperation.tailPct(this, percent)); + } } @Override @@ -635,11 +662,14 @@ public Table exactJoin( } private Table exactJoinImpl(Table table, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { - return QueryPerformanceRecorder.withNugget( - "exactJoin(" + table + "," + Arrays.toString(columnsToMatch) + "," + Arrays.toString(columnsToMatch) - + ")", - sizeForInstrumentation(), - () -> naturalJoinInternal(table, columnsToMatch, columnsToAdd, true)); + final UpdateGraph updateGraph = getUpdateGraph(table); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget( + "exactJoin(" + table + "," + Arrays.toString(columnsToMatch) + "," + Arrays.toString(columnsToMatch) + + ")", + sizeForInstrumentation(), + () -> naturalJoinInternal(table, columnsToMatch, columnsToAdd, true)); + } } private static String toString(Collection groupByList) { @@ -648,29 +678,33 @@ private static String toString(Collection groupByList) { @Override public Table aggAllBy(AggSpec spec, ColumnName... 
groupByColumns) { - for (ColumnName name : AggSpecColumnReferences.of(spec)) { - if (!hasColumns(name.name())) { + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + for (ColumnName name : AggSpecColumnReferences.of(spec)) { + if (!hasColumns(name.name())) { + throw new IllegalArgumentException( + "aggAllBy spec references column that does not exist: spec=" + spec + ", groupByColumns=" + + toString(Arrays.asList(groupByColumns))); + } + } + final List groupByList = Arrays.asList(groupByColumns); + final List tableColumns = definition.getTypedColumnNames(); + final Optional agg = AggregateAllTable.singleAggregation(spec, groupByList, tableColumns); + if (agg.isEmpty()) { throw new IllegalArgumentException( - "aggAllBy spec references column that does not exist: spec=" + spec + ", groupByColumns=" - + toString(Arrays.asList(groupByColumns))); - } - } - final List groupByList = Arrays.asList(groupByColumns); - final List tableColumns = definition.getTypedColumnNames(); - final Optional agg = AggregateAllTable.singleAggregation(spec, groupByList, tableColumns); - if (agg.isEmpty()) { - throw new IllegalArgumentException( - "aggAllBy has no columns to aggregate: spec=" + spec + ", groupByColumns=" + toString(groupByList)); - } - final QueryTable tableToUse = (QueryTable) AggAllByUseTable.of(this, spec); - final List aggs = List.of(agg.get()); - final MemoizedOperationKey aggKey = MemoizedOperationKey.aggBy(aggs, false, null, groupByList); - return tableToUse.memoizeResult(aggKey, () -> { - final QueryTable result = - tableToUse.aggNoMemo(AggregationProcessor.forAggregation(aggs), false, null, groupByList); - spec.walk(new AggAllByCopyAttributes(this, result)); - return result; - }); + "aggAllBy has no columns to aggregate: spec=" + spec + ", groupByColumns=" + + toString(groupByList)); + } + final QueryTable tableToUse = (QueryTable) AggAllByUseTable.of(this, 
spec); + final List aggs = List.of(agg.get()); + final MemoizedOperationKey aggKey = MemoizedOperationKey.aggBy(aggs, false, null, groupByList); + return tableToUse.memoizeResult(aggKey, () -> { + final QueryTable result = + tableToUse.aggNoMemo(AggregationProcessor.forAggregation(aggs), false, null, groupByList); + spec.walk(new AggAllByCopyAttributes(this, result)); + return result; + }); + } } @Override @@ -679,27 +713,30 @@ public Table aggBy( final boolean preserveEmpty, final Table initialGroups, final Collection groupByColumns) { - if (aggregations.isEmpty()) { - throw new IllegalArgumentException( - "aggBy must have at least one aggregation, none specified. groupByColumns=" - + toString(groupByColumns)); - } - final List optimized = AggregationOptimizer.of(aggregations); - final MemoizedOperationKey aggKey = - MemoizedOperationKey.aggBy(optimized, preserveEmpty, initialGroups, groupByColumns); - final Table aggregationTable = memoizeResult(aggKey, () -> aggNoMemo( - AggregationProcessor.forAggregation(optimized), preserveEmpty, initialGroups, groupByColumns)); + final UpdateGraph updateGraph = getUpdateGraph(initialGroups); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (aggregations.isEmpty()) { + throw new IllegalArgumentException( + "aggBy must have at least one aggregation, none specified. 
groupByColumns=" + + toString(groupByColumns)); + } + final List optimized = AggregationOptimizer.of(aggregations); + final MemoizedOperationKey aggKey = + MemoizedOperationKey.aggBy(optimized, preserveEmpty, initialGroups, groupByColumns); + final Table aggregationTable = memoizeResult(aggKey, () -> aggNoMemo( + AggregationProcessor.forAggregation(optimized), preserveEmpty, initialGroups, groupByColumns)); + + final List optimizedOrder = AggregationOutputs.of(optimized).collect(Collectors.toList()); + final List userOrder = AggregationOutputs.of(aggregations).collect(Collectors.toList()); + if (userOrder.equals(optimizedOrder)) { + return aggregationTable; + } - final List optimizedOrder = AggregationOutputs.of(optimized).collect(Collectors.toList()); - final List userOrder = AggregationOutputs.of(aggregations).collect(Collectors.toList()); - if (userOrder.equals(optimizedOrder)) { - return aggregationTable; + // We need to re-order the result columns to match the user-provided order + final List resultOrder = + Stream.concat(groupByColumns.stream(), userOrder.stream()).collect(Collectors.toList()); + return aggregationTable.view(resultOrder); } - - // We need to re-order the result columns to match the user-provided order - final List resultOrder = - Stream.concat(groupByColumns.stream(), userOrder.stream()).collect(Collectors.toList()); - return aggregationTable.view(resultOrder); } public QueryTable aggNoMemo( @@ -707,11 +744,14 @@ public QueryTable aggNoMemo( final boolean preserveEmpty, @Nullable final Table initialGroups, @NotNull final Collection groupByColumns) { - final String description = "aggregation(" + aggregationContextFactory - + ", " + groupByColumns + ")"; - return QueryPerformanceRecorder.withNugget(description, sizeForInstrumentation(), - () -> ChunkedOperatorAggregationHelper.aggregation( - aggregationContextFactory, this, preserveEmpty, initialGroups, groupByColumns)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final 
SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final String description = "aggregation(" + aggregationContextFactory + + ", " + groupByColumns + ")"; + return QueryPerformanceRecorder.withNugget(description, sizeForInstrumentation(), + () -> ChunkedOperatorAggregationHelper.aggregation( + aggregationContextFactory, this, preserveEmpty, initialGroups, groupByColumns)); + } } private static UnsupportedOperationException unsupportedForBlinkTables(@NotNull final String operationName) { @@ -721,14 +761,20 @@ private static UnsupportedOperationException unsupportedForBlinkTables(@NotNull @Override public Table headBy(long nRows, String... groupByColumns) { - return QueryPerformanceRecorder.withNugget("headBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), () -> headOrTailBy(nRows, true, groupByColumns)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget("headBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", + sizeForInstrumentation(), () -> headOrTailBy(nRows, true, groupByColumns)); + } } @Override public Table tailBy(long nRows, String... groupByColumns) { - return QueryPerformanceRecorder.withNugget("tailBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), () -> headOrTailBy(nRows, false, groupByColumns)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget("tailBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", + sizeForInstrumentation(), () -> headOrTailBy(nRows, false, groupByColumns)); + } } private Table headOrTailBy(long nRows, boolean head, String... 
groupByColumns) { @@ -808,49 +854,54 @@ private String getCastFormulaInternal(Class dataType) { @Override public Table moveColumns(int index, boolean moveToEnd, String... columnsToMove) { - // Get the current columns - final List currentColumns = getDefinition().getColumnNames(); - - // Create a Set from columnsToMove. This way, we can rename and rearrange columns at once. - final Set leftColsToMove = new HashSet<>(); - final Set rightColsToMove = new HashSet<>(); - int extraCols = 0; - - for (final String columnToMove : columnsToMove) { - final String left = MatchPairFactory.getExpression(columnToMove).leftColumn; - final String right = MatchPairFactory.getExpression(columnToMove).rightColumn; - - if (!leftColsToMove.add(left) || !currentColumns.contains(left) || (rightColsToMove.contains(left) - && !left.equals(right) && leftColsToMove.stream().anyMatch(col -> col.equals(right)))) { - extraCols++; - } - if (currentColumns.stream().anyMatch(currentColumn -> currentColumn.equals(right)) && !left.equals(right) - && rightColsToMove.add(right) && !rightColsToMove.contains(left)) { - extraCols--; + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + // Get the current columns + final List currentColumns = getDefinition().getColumnNames(); + + // Create a Set from columnsToMove. This way, we can rename and rearrange columns at once. 
+ final Set leftColsToMove = new HashSet<>(); + final Set rightColsToMove = new HashSet<>(); + int extraCols = 0; + + for (final String columnToMove : columnsToMove) { + final String left = MatchPairFactory.getExpression(columnToMove).leftColumn; + final String right = MatchPairFactory.getExpression(columnToMove).rightColumn; + + if (!leftColsToMove.add(left) || !currentColumns.contains(left) || (rightColsToMove.contains(left) + && !left.equals(right) && leftColsToMove.stream().anyMatch(col -> col.equals(right)))) { + extraCols++; + } + if (currentColumns.stream().anyMatch(currentColumn -> currentColumn.equals(right)) + && !left.equals(right) + && rightColsToMove.add(right) && !rightColsToMove.contains(left)) { + extraCols--; + } } - } - index += moveToEnd ? extraCols : 0; + index += moveToEnd ? extraCols : 0; - // vci for write, cci for currentColumns, ctmi for columnsToMove - final SelectColumn[] viewColumns = new SelectColumn[currentColumns.size() + extraCols]; - for (int vci = 0, cci = 0, ctmi = 0; vci < viewColumns.length;) { - if (vci >= index && ctmi < columnsToMove.length) { - viewColumns[vci++] = SelectColumnFactory.getExpression(columnsToMove[ctmi++]); - } else { - // Don't add the column if it's one of the columns we're moving or if it has been renamed. 
- final String currentColumn = currentColumns.get(cci++); - if (!leftColsToMove.contains(currentColumn) - && Arrays.stream(viewColumns).noneMatch( - viewCol -> viewCol != null && viewCol.getMatchPair().leftColumn.equals(currentColumn)) - && Arrays.stream(columnsToMove) - .noneMatch(colToMove -> MatchPairFactory.getExpression(colToMove).rightColumn - .equals(currentColumn))) { - - viewColumns[vci++] = SelectColumnFactory.getExpression(currentColumn); + // vci for write, cci for currentColumns, ctmi for columnsToMove + final SelectColumn[] viewColumns = new SelectColumn[currentColumns.size() + extraCols]; + for (int vci = 0, cci = 0, ctmi = 0; vci < viewColumns.length;) { + if (vci >= index && ctmi < columnsToMove.length) { + viewColumns[vci++] = SelectColumnFactory.getExpression(columnsToMove[ctmi++]); + } else { + // Don't add the column if it's one of the columns we're moving or if it has been renamed. + final String currentColumn = currentColumns.get(cci++); + if (!leftColsToMove.contains(currentColumn) + && Arrays.stream(viewColumns).noneMatch( + viewCol -> viewCol != null + && viewCol.getMatchPair().leftColumn.equals(currentColumn)) + && Arrays.stream(columnsToMove) + .noneMatch(colToMove -> MatchPairFactory.getExpression(colToMove).rightColumn + .equals(currentColumn))) { + + viewColumns[vci++] = SelectColumnFactory.getExpression(currentColumn); + } } } + return viewOrUpdateView(Flavor.View, viewColumns); } - return viewOrUpdateView(Flavor.View, viewColumns); } public static class FilteredTable extends QueryTable implements WhereFilter.RecomputeListener { @@ -918,7 +969,7 @@ public void setIsRefreshing(boolean refreshing) { /** * Refilter relevant rows. - * + *

* This method is not part of the public API, and is only exposed so that {@link WhereListener} can access it. * * @param upstream the upstream update @@ -1040,7 +1091,10 @@ private void setWhereListener(MergedListener whereListener) { @Override public Table where(Filter filter) { - return whereInternal(WhereFilter.fromInternal(filter)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return whereInternal(WhereFilter.fromInternal(filter)); + } } private QueryTable whereInternal(final WhereFilter... filters) { @@ -1116,11 +1170,14 @@ void handleUncaughtException(Exception throwable) { currentMappingFuture.completeExceptionally(throwable); } }; + final ExecutionContext executionContext = ExecutionContext.getContext(); initialFilterExecution.scheduleCompletion(x -> { - if (x.exceptionResult != null) { - currentMappingFuture.completeExceptionally(x.exceptionResult); - } else { - currentMappingFuture.complete(x.addedResult.toTracking()); + try (final SafeCloseable ignored = executionContext.open()) { + if (x.exceptionResult != null) { + currentMappingFuture.completeExceptionally(x.exceptionResult); + } else { + currentMappingFuture.complete(x.addedResult.toTracking()); + } } }); @@ -1231,12 +1288,18 @@ protected WritableRowSet filterRows(RowSet currentMapping, RowSet fullSet, boole @Override public Table whereIn(Table rightTable, Collection columnsToMatch) { - return whereInInternal(rightTable, true, MatchPair.fromMatches(columnsToMatch)); + final UpdateGraph updateGraph = getUpdateGraph(rightTable); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return whereInInternal(rightTable, true, MatchPair.fromMatches(columnsToMatch)); + } } @Override public Table whereNotIn(Table rightTable, Collection columnsToMatch) { - return whereInInternal(rightTable, false, MatchPair.fromMatches(columnsToMatch)); + final 
UpdateGraph updateGraph = getUpdateGraph(rightTable); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return whereInInternal(rightTable, false, MatchPair.fromMatches(columnsToMatch)); + } } private Table whereInInternal(final Table rightTable, final boolean inclusion, @@ -1269,16 +1332,20 @@ private Table whereInInternal(final Table rightTable, final boolean inclusion, @Override public Table flatten() { - if (!isFlat() && !isRefreshing() && rowSet.size() - 1 == rowSet.lastRowKey()) { - // We're already flat, and we'll never update; so we can just return ourselves, after setting ourselves flat - setFlat(); - } + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (!isFlat() && !isRefreshing() && rowSet.size() - 1 == rowSet.lastRowKey()) { + // We're already flat, and we'll never update; so we can just return ourselves, after setting ourselves + // flat + setFlat(); + } - if (isFlat()) { - return prepareReturnThis(); - } + if (isFlat()) { + return prepareReturnThis(); + } - return getResult(new FlattenOperation(this)); + return getResult(new FlattenOperation(this)); + } } public void setFlat() { @@ -1303,7 +1370,10 @@ public void releaseCachedResources() { @Override public Table select(Collection columns) { - return selectInternal(SelectColumn.from(columns.isEmpty() ? definition.getTypedColumnNames() : columns)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return selectInternal(SelectColumn.from(columns.isEmpty() ? definition.getTypedColumnNames() : columns)); + } } private Table selectInternal(SelectColumn... 
selectColumns) { @@ -1321,14 +1391,17 @@ private boolean exceedsMaximumStaticSelectOverhead() { @Override public Table update(final Collection newColumns) { - return selectOrUpdate(Flavor.Update, SelectColumn.from(newColumns)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return selectOrUpdate(Flavor.Update, SelectColumn.from(newColumns)); + } } /** * This does a certain amount of validation and can be used to get confidence that the formulas are valid. If it is * not valid, you will get an exception. Positive test (should pass validation): "X = 12", "Y = X + 1") Negative * test (should fail validation): "X = 12", "Y = Z + 1") - * + *

* DO NOT USE -- this API is in flux and may change or disappear in the future. */ public SelectValidationResult validateSelect(final SelectColumn... selectColumns) { @@ -1388,7 +1461,8 @@ this, mode, columns, rowSet, getModifiedColumnSetForUpdates(), publishTheseSourc final QueryTable resultTable; final LivenessScope liveResultCapture = isRefreshing() ? new LivenessScope() : null; try (final SafeCloseable ignored1 = liveResultCapture != null ? liveResultCapture::release : null; - final SafeCloseable ignored2 = ExecutionContext.getDefaultContext().open()) { + final SafeCloseable ignored2 = + ExecutionContext.getDefaultContext().withUpdateGraph(getUpdateGraph()).open()) { // we open the default context here to ensure that the update processing happens in the default // context whether it is processed in parallel or not try (final RowSet emptyRowSet = RowSetFactory.empty(); @@ -1513,15 +1587,21 @@ private void propagateGrouping(SelectColumn[] selectColumns, QueryTable resultTa @Override public Table view(final Collection viewColumns) { - if (viewColumns == null || viewColumns.isEmpty()) { - return prepareReturnThis(); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (viewColumns == null || viewColumns.isEmpty()) { + return prepareReturnThis(); + } + return viewOrUpdateView(Flavor.View, SelectColumn.from(viewColumns)); } - return viewOrUpdateView(Flavor.View, SelectColumn.from(viewColumns)); } @Override public Table updateView(final Collection viewColumns) { - return viewOrUpdateView(Flavor.UpdateView, SelectColumn.from(viewColumns)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return viewOrUpdateView(Flavor.UpdateView, SelectColumn.from(viewColumns)); + } } private Table viewOrUpdateView(Flavor flavor, final SelectColumn... 
viewColumns) { @@ -1626,106 +1706,116 @@ public void onUpdate(final TableUpdate upstream) { @Override public Table lazyUpdate(final Collection newColumns) { - final SelectColumn[] selectColumns = SelectColumn.from(newColumns); - return QueryPerformanceRecorder.withNugget("lazyUpdate(" + selectColumnString(selectColumns) + ")", - sizeForInstrumentation(), () -> { - checkInitiateOperation(); - - final SelectAndViewAnalyzerWrapper analyzerWrapper = SelectAndViewAnalyzer.create( - this, SelectAndViewAnalyzer.Mode.VIEW_LAZY, columns, rowSet, - getModifiedColumnSetForUpdates(), - true, false, selectColumns); - final SelectColumn[] processedColumns = analyzerWrapper.getProcessedColumns() - .toArray(SelectColumn[]::new); - final QueryTable result = new QueryTable( - rowSet, analyzerWrapper.getPublishedColumnResources()); - if (isRefreshing()) { - addUpdateListener(new ListenerImpl( - "lazyUpdate(" + Arrays.deepToString(processedColumns) + ')', this, result)); - } - propagateFlatness(result); - copyAttributes(result, CopyAttributeOperation.UpdateView); - copySortableColumns(result, processedColumns); - maybeCopyColumnDescriptions(result, processedColumns); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final SelectColumn[] selectColumns = SelectColumn.from(newColumns); + return QueryPerformanceRecorder.withNugget("lazyUpdate(" + selectColumnString(selectColumns) + ")", + sizeForInstrumentation(), () -> { + checkInitiateOperation(); + + final SelectAndViewAnalyzerWrapper analyzerWrapper = SelectAndViewAnalyzer.create( + this, SelectAndViewAnalyzer.Mode.VIEW_LAZY, columns, rowSet, + getModifiedColumnSetForUpdates(), + true, false, selectColumns); + final SelectColumn[] processedColumns = analyzerWrapper.getProcessedColumns() + .toArray(SelectColumn[]::new); + final QueryTable result = new QueryTable( + rowSet, analyzerWrapper.getPublishedColumnResources()); + if 
(isRefreshing()) { + addUpdateListener(new ListenerImpl( + "lazyUpdate(" + Arrays.deepToString(processedColumns) + ')', this, result)); + } + propagateFlatness(result); + copyAttributes(result, CopyAttributeOperation.UpdateView); + copySortableColumns(result, processedColumns); + maybeCopyColumnDescriptions(result, processedColumns); - return analyzerWrapper.applyShiftsAndRemainingColumns( - this, result, SelectAndViewAnalyzerWrapper.UpdateFlavor.LazyUpdate); - }); + return analyzerWrapper.applyShiftsAndRemainingColumns( + this, result, SelectAndViewAnalyzerWrapper.UpdateFlavor.LazyUpdate); + }); + } } @Override public Table dropColumns(String... columnNames) { - if (columnNames == null || columnNames.length == 0) { - return prepareReturnThis(); - } - return memoizeResult(MemoizedOperationKey.dropColumns(columnNames), () -> QueryPerformanceRecorder - .withNugget("dropColumns(" + Arrays.toString(columnNames) + ")", sizeForInstrumentation(), () -> { - final Mutable result = new MutableObject<>(); - - final Set existingColumns = new HashSet<>(definition.getColumnNames()); - final Set columnNamesToDrop = new HashSet<>(Arrays.asList(columnNames)); - if (!existingColumns.containsAll(columnNamesToDrop)) { - columnNamesToDrop.removeAll(existingColumns); - throw new RuntimeException("Unknown columns: " + columnNamesToDrop - + ", available columns = " + getColumnSourceMap().keySet()); - } - final Map> newColumns = new LinkedHashMap<>(columns); - for (String columnName : columnNames) { - newColumns.remove(columnName); - } - - final SwapListener swapListener = - createSwapListenerIfRefreshing(SwapListener::new); - - initializeWithSnapshot("dropColumns", swapListener, (usePrev, beforeClockValue) -> { - final QueryTable resultTable = new QueryTable(rowSet, newColumns); - propagateFlatness(resultTable); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (columnNames == 
null || columnNames.length == 0) { + return prepareReturnThis(); + } + return memoizeResult(MemoizedOperationKey.dropColumns(columnNames), () -> QueryPerformanceRecorder + .withNugget("dropColumns(" + Arrays.toString(columnNames) + ")", sizeForInstrumentation(), () -> { + final Mutable
result = new MutableObject<>(); + + final Set existingColumns = new HashSet<>(definition.getColumnNames()); + final Set columnNamesToDrop = new HashSet<>(Arrays.asList(columnNames)); + if (!existingColumns.containsAll(columnNamesToDrop)) { + columnNamesToDrop.removeAll(existingColumns); + throw new RuntimeException("Unknown columns: " + columnNamesToDrop + + ", available columns = " + getColumnSourceMap().keySet()); + } + final Map> newColumns = new LinkedHashMap<>(columns); + for (String columnName : columnNames) { + newColumns.remove(columnName); + } - copyAttributes(resultTable, CopyAttributeOperation.DropColumns); - copySortableColumns(resultTable, resultTable.getDefinition().getColumnNameMap()::containsKey); - maybeCopyColumnDescriptions(resultTable); + final SwapListener swapListener = + createSwapListenerIfRefreshing(SwapListener::new); - if (swapListener != null) { - final ModifiedColumnSet.Transformer mcsTransformer = - newModifiedColumnSetTransformer(resultTable, - resultTable.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); - final ListenerImpl listener = new ListenerImpl( - "dropColumns(" + Arrays.deepToString(columnNames) + ')', this, resultTable) { - @Override - public void onUpdate(final TableUpdate upstream) { - final TableUpdateImpl downstream = TableUpdateImpl.copy(upstream); - final ModifiedColumnSet resultModifiedColumnSet = - resultTable.getModifiedColumnSetForUpdates(); - mcsTransformer.clearAndTransform(upstream.modifiedColumnSet(), - resultModifiedColumnSet); - if (upstream.modified().isEmpty() || resultModifiedColumnSet.empty()) { - downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; - if (downstream.modified().isNonempty()) { - downstream.modified().close(); - downstream.modified = RowSetFactory.empty(); + initializeWithSnapshot("dropColumns", swapListener, (usePrev, beforeClockValue) -> { + final QueryTable resultTable = new QueryTable(rowSet, newColumns); + propagateFlatness(resultTable); + + 
copyAttributes(resultTable, CopyAttributeOperation.DropColumns); + copySortableColumns(resultTable, + resultTable.getDefinition().getColumnNameMap()::containsKey); + maybeCopyColumnDescriptions(resultTable); + + if (swapListener != null) { + final ModifiedColumnSet.Transformer mcsTransformer = + newModifiedColumnSetTransformer(resultTable, + resultTable.getColumnSourceMap().keySet() + .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + final ListenerImpl listener = new ListenerImpl( + "dropColumns(" + Arrays.deepToString(columnNames) + ')', this, resultTable) { + @Override + public void onUpdate(final TableUpdate upstream) { + final TableUpdateImpl downstream = TableUpdateImpl.copy(upstream); + final ModifiedColumnSet resultModifiedColumnSet = + resultTable.getModifiedColumnSetForUpdates(); + mcsTransformer.clearAndTransform(upstream.modifiedColumnSet(), + resultModifiedColumnSet); + if (upstream.modified().isEmpty() || resultModifiedColumnSet.empty()) { + downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; + if (downstream.modified().isNonempty()) { + downstream.modified().close(); + downstream.modified = RowSetFactory.empty(); + } + } else { + downstream.modifiedColumnSet = resultModifiedColumnSet; } - } else { - downstream.modifiedColumnSet = resultModifiedColumnSet; + resultTable.notifyListeners(downstream); } - resultTable.notifyListeners(downstream); - } - }; - swapListener.setListenerAndResult(listener, resultTable); - } + }; + swapListener.setListenerAndResult(listener, resultTable); + } - result.setValue(resultTable); + result.setValue(resultTable); - return true; - }); + return true; + }); - return result.getValue(); - })); + return result.getValue(); + })); + } } @Override public Table renameColumns(Collection pairs) { - return renameColumnsImpl(MatchPair.fromPairs(pairs)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return 
renameColumnsImpl(MatchPair.fromPairs(pairs)); + } } private Table renameColumnsImpl(MatchPair... pairs) { @@ -1820,28 +1910,35 @@ public Table asOfJoin( private Table ajImpl(final Table rightTable, final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, AsOfJoinRule joinRule) { - if (rightTable == null) { - throw new IllegalArgumentException("aj() requires a non-null right hand side table."); + final UpdateGraph updateGraph = getUpdateGraph(rightTable); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (rightTable == null) { + throw new IllegalArgumentException("aj() requires a non-null right hand side table."); + } + final Table rightTableCoalesced = rightTable.coalesce(); + return QueryPerformanceRecorder.withNugget( + "aj(" + "rightTable, " + matchString(columnsToMatch) + ", " + joinRule + ", " + + matchString(columnsToAdd) + ")", + () -> ajInternal(rightTableCoalesced, columnsToMatch, columnsToAdd, SortingOrder.Ascending, + joinRule)); } - final Table rightTableCoalesced = rightTable.coalesce(); - return QueryPerformanceRecorder.withNugget( - "aj(" + "rightTable, " + matchString(columnsToMatch) + ", " + joinRule + ", " - + matchString(columnsToAdd) + ")", - () -> ajInternal(rightTableCoalesced, columnsToMatch, columnsToAdd, SortingOrder.Ascending, - joinRule)); } private Table rajImpl(final Table rightTable, final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, AsOfJoinRule joinRule) { - if (rightTable == null) { - throw new IllegalArgumentException("raj() requires a non-null right hand side table."); + final UpdateGraph updateGraph = getUpdateGraph(rightTable); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + if (rightTable == null) { + throw new IllegalArgumentException("raj() requires a non-null right hand side table."); + } + final Table rightTableCoalesced = rightTable.coalesce(); + return 
QueryPerformanceRecorder.withNugget( + "raj(" + "rightTable, " + matchString(columnsToMatch) + ", " + joinRule + ", " + + matchString(columnsToAdd) + ")", + () -> ajInternal(rightTableCoalesced.reverse(), columnsToMatch, columnsToAdd, + SortingOrder.Descending, + joinRule)); } - final Table rightTableCoalesced = rightTable.coalesce(); - return QueryPerformanceRecorder.withNugget( - "raj(" + "rightTable, " + matchString(columnsToMatch) + ", " + joinRule + ", " - + matchString(columnsToAdd) + ")", - () -> ajInternal(rightTableCoalesced.reverse(), columnsToMatch, columnsToAdd, SortingOrder.Descending, - joinRule)); } private Table ajInternal(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, @@ -1919,9 +2016,12 @@ public Table naturalJoin( } private Table naturalJoinImpl(final Table rightTable, final MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { - return QueryPerformanceRecorder.withNugget( - "naturalJoin(" + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) + ")", - () -> naturalJoinInternal(rightTable, columnsToMatch, columnsToAdd, false)); + final UpdateGraph updateGraph = getUpdateGraph(rightTable); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget( + "naturalJoin(" + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) + ")", + () -> naturalJoinInternal(rightTable, columnsToMatch, columnsToAdd, false)); + } } private Table naturalJoinInternal(final Table rightTable, final MatchPair[] columnsToMatch, @@ -1974,10 +2074,13 @@ public Table join( private Table joinImpl(final Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, int numRightBitsToReserve) { - return memoizeResult( - MemoizedOperationKey.crossJoin(rightTable, columnsToMatch, columnsToAdd, - numRightBitsToReserve), - () -> joinNoMemo(rightTable, columnsToMatch, columnsToAdd, numRightBitsToReserve)); + final UpdateGraph updateGraph = 
getUpdateGraph(rightTable); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return memoizeResult( + MemoizedOperationKey.crossJoin(rightTable, columnsToMatch, columnsToAdd, + numRightBitsToReserve), + () -> joinNoMemo(rightTable, columnsToMatch, columnsToAdd, numRightBitsToReserve)); + } } private Table joinNoMemo( @@ -2365,29 +2468,35 @@ public void onUpdate(TableUpdate upstream) { @Override public Table snapshot() { // TODO(deephaven-core#3271): Make snapshot() concurrent - return QueryPerformanceRecorder.withNugget("snapshot()", sizeForInstrumentation(), - () -> ((QueryTable) TableTools.emptyTable(1)).snapshotInternal(this, true)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget("snapshot()", sizeForInstrumentation(), + () -> ((QueryTable) TableTools.emptyTable(1)).snapshotInternal(this, true)); + } } @Override public Table snapshotWhen(Table trigger, SnapshotWhenOptions options) { - final boolean initial = options.has(Flag.INITIAL); - final boolean incremental = options.has(Flag.INCREMENTAL); - final boolean history = options.has(Flag.HISTORY); - final String description = options.description(); - if (history) { - if (initial || incremental) { - // noinspection ThrowableNotThrown - Assert.statementNeverExecuted( - "SnapshotWhenOptions should disallow history with initial or incremental"); - return null; + final UpdateGraph updateGraph = getUpdateGraph(trigger); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final boolean initial = options.has(Flag.INITIAL); + final boolean incremental = options.has(Flag.INCREMENTAL); + final boolean history = options.has(Flag.HISTORY); + final String description = options.description(); + if (history) { + if (initial || incremental) { + // noinspection 
ThrowableNotThrown + Assert.statementNeverExecuted( + "SnapshotWhenOptions should disallow history with initial or incremental"); + return null; + } + return ((QueryTable) trigger).snapshotHistory(description, this, options.stampColumns()); } - return ((QueryTable) trigger).snapshotHistory(description, this, options.stampColumns()); - } - if (incremental) { - return ((QueryTable) trigger).snapshotIncremental(description, this, initial, options.stampColumns()); + if (incremental) { + return ((QueryTable) trigger).snapshotIncremental(description, this, initial, options.stampColumns()); + } + return ((QueryTable) trigger).snapshot(description, this, initial, options.stampColumns()); } - return ((QueryTable) trigger).snapshot(description, this, initial, options.stampColumns()); } private QueryTable maybeViewForSnapshot(Collection stampColumns) { @@ -2434,18 +2543,21 @@ static ColumnSource maybeTransformToPrimitive(final ColumnSource columnSou @Override public Table sort(Collection columnsToSortBy) { - final SortPair[] sortPairs = SortPair.from(columnsToSortBy); - if (sortPairs.length == 0) { - return prepareReturnThis(); - } else if (sortPairs.length == 1) { - final String columnName = sortPairs[0].getColumn(); - final SortingOrder order = sortPairs[0].getOrder(); - if (SortedColumnsAttribute.isSortedBy(this, columnName, order)) { + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final SortPair[] sortPairs = SortPair.from(columnsToSortBy); + if (sortPairs.length == 0) { return prepareReturnThis(); + } else if (sortPairs.length == 1) { + final String columnName = sortPairs[0].getColumn(); + final SortingOrder order = sortPairs[0].getOrder(); + if (SortedColumnsAttribute.isSortedBy(this, columnName, order)) { + return prepareReturnThis(); + } } - } - return getResult(new SortOperation(this, sortPairs)); + return getResult(new SortOperation(this, sortPairs)); + } 
} /** @@ -2475,215 +2587,226 @@ static int setMinimumUngroupBase(int minimumUngroupBase) { */ @Override public Table reverse() { - return getResult(new ReverseOperation(this)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return getResult(new ReverseOperation(this)); + } } @Override public Table ungroup(boolean nullFill, Collection columnsToUngroup) { - final String[] columnsToUngroupBy; - if (columnsToUngroup.isEmpty()) { - columnsToUngroupBy = getDefinition() - .getColumnStream() - .filter(c -> c.getDataType().isArray() || QueryLanguageParser.isTypedVector(c.getDataType())) - .map(ColumnDefinition::getName) - .toArray(String[]::new); - } else { - columnsToUngroupBy = columnsToUngroup.stream().map(ColumnName::name).toArray(String[]::new); - } - return QueryPerformanceRecorder.withNugget("ungroup(" + Arrays.toString(columnsToUngroupBy) + ")", - sizeForInstrumentation(), () -> { - if (columnsToUngroupBy.length == 0) { - return prepareReturnThis(); - } - - checkInitiateOperation(); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final String[] columnsToUngroupBy; + if (columnsToUngroup.isEmpty()) { + columnsToUngroupBy = getDefinition() + .getColumnStream() + .filter(c -> c.getDataType().isArray() || QueryLanguageParser.isTypedVector(c.getDataType())) + .map(ColumnDefinition::getName) + .toArray(String[]::new); + } else { + columnsToUngroupBy = columnsToUngroup.stream().map(ColumnName::name).toArray(String[]::new); + } + return QueryPerformanceRecorder.withNugget("ungroup(" + Arrays.toString(columnsToUngroupBy) + ")", + sizeForInstrumentation(), () -> { + if (columnsToUngroupBy.length == 0) { + return prepareReturnThis(); + } - final Map> arrayColumns = new HashMap<>(); - final Map> vectorColumns = new HashMap<>(); - for (String name : 
columnsToUngroupBy) { - ColumnSource column = getColumnSource(name); - if (column.getType().isArray()) { - arrayColumns.put(name, column); - } else if (Vector.class.isAssignableFrom(column.getType())) { - vectorColumns.put(name, column); - } else { - throw new RuntimeException("Column " + name + " is not an array"); + checkInitiateOperation(); + + final Map> arrayColumns = new HashMap<>(); + final Map> vectorColumns = new HashMap<>(); + for (String name : columnsToUngroupBy) { + ColumnSource column = getColumnSource(name); + if (column.getType().isArray()) { + arrayColumns.put(name, column); + } else if (Vector.class.isAssignableFrom(column.getType())) { + vectorColumns.put(name, column); + } else { + throw new RuntimeException("Column " + name + " is not an array"); + } } - } - final long[] sizes = new long[intSize("ungroup")]; - long maxSize = computeMaxSize(rowSet, arrayColumns, vectorColumns, null, sizes, nullFill); - final int initialBase = Math.max(64 - Long.numberOfLeadingZeros(maxSize), minimumUngroupBase); - - final CrossJoinShiftState shiftState = new CrossJoinShiftState(initialBase, true); - - final Map> resultMap = new LinkedHashMap<>(); - for (Map.Entry> es : getColumnSourceMap().entrySet()) { - final ColumnSource column = es.getValue(); - final String name = es.getKey(); - final ColumnSource result; - if (vectorColumns.containsKey(name) || arrayColumns.containsKey(name)) { - final UngroupedColumnSource ungroupedSource = - UngroupedColumnSource.getColumnSource(column); - ungroupedSource.initializeBase(initialBase); - result = ungroupedSource; - } else { - result = BitShiftingColumnSource.maybeWrap(shiftState, column); + final long[] sizes = new long[intSize("ungroup")]; + long maxSize = computeMaxSize(rowSet, arrayColumns, vectorColumns, null, sizes, nullFill); + final int initialBase = Math.max(64 - Long.numberOfLeadingZeros(maxSize), minimumUngroupBase); + + final CrossJoinShiftState shiftState = new CrossJoinShiftState(initialBase, true); + + final 
Map> resultMap = new LinkedHashMap<>(); + for (Map.Entry> es : getColumnSourceMap().entrySet()) { + final ColumnSource column = es.getValue(); + final String name = es.getKey(); + final ColumnSource result; + if (vectorColumns.containsKey(name) || arrayColumns.containsKey(name)) { + final UngroupedColumnSource ungroupedSource = + UngroupedColumnSource.getColumnSource(column); + ungroupedSource.initializeBase(initialBase); + result = ungroupedSource; + } else { + result = BitShiftingColumnSource.maybeWrap(shiftState, column); + } + resultMap.put(name, result); } - resultMap.put(name, result); - } - final QueryTable result = new QueryTable( - getUngroupIndex(sizes, RowSetFactory.builderRandom(), initialBase, rowSet) - .build().toTracking(), - resultMap); - if (isRefreshing()) { - startTrackingPrev(resultMap.values()); + final QueryTable result = new QueryTable( + getUngroupIndex(sizes, RowSetFactory.builderRandom(), initialBase, rowSet) + .build().toTracking(), + resultMap); + if (isRefreshing()) { + startTrackingPrev(resultMap.values()); - addUpdateListener(new ShiftObliviousListenerImpl( - "ungroup(" + Arrays.deepToString(columnsToUngroupBy) + ')', - this, result) { + addUpdateListener(new ShiftObliviousListenerImpl( + "ungroup(" + Arrays.deepToString(columnsToUngroupBy) + ')', + this, result) { - @Override - public void onUpdate(final RowSet added, final RowSet removed, final RowSet modified) { - intSize("ungroup"); - - int newBase = shiftState.getNumShiftBits(); - RowSetBuilderRandom ungroupAdded = RowSetFactory.builderRandom(); - RowSetBuilderRandom ungroupModified = RowSetFactory.builderRandom(); - RowSetBuilderRandom ungroupRemoved = RowSetFactory.builderRandom(); - newBase = evaluateIndex(added, ungroupAdded, newBase); - newBase = evaluateModified(modified, ungroupModified, ungroupAdded, ungroupRemoved, - newBase); - if (newBase > shiftState.getNumShiftBits()) { - rebase(newBase + 1); - } else { - evaluateRemovedIndex(removed, ungroupRemoved); - final RowSet 
removedRowSet = ungroupRemoved.build(); - final RowSet addedRowSet = ungroupAdded.build(); - result.getRowSet().writableCast().update(addedRowSet, removedRowSet); - final RowSet modifiedRowSet = ungroupModified.build(); - - if (!modifiedRowSet.subsetOf(result.getRowSet())) { - final RowSet missingModifications = modifiedRowSet.minus(result.getRowSet()); - log.error().append("Result TrackingWritableRowSet: ") - .append(result.getRowSet().toString()) - .endl(); - log.error().append("Missing modifications: ") - .append(missingModifications.toString()).endl(); - log.error().append("Added: ").append(addedRowSet.toString()).endl(); - log.error().append("Modified: ").append(modifiedRowSet.toString()).endl(); - log.error().append("Removed: ").append(removedRowSet.toString()).endl(); - - for (Map.Entry> es : arrayColumns.entrySet()) { - ColumnSource arrayColumn = es.getValue(); - String name = es.getKey(); - - RowSet.Iterator iterator = rowSet.iterator(); - for (int i = 0; i < rowSet.size(); i++) { - final long next = iterator.nextLong(); - int size = (arrayColumn.get(next) == null ? 0 - : Array.getLength(arrayColumn.get(next))); - int prevSize = (arrayColumn.getPrev(next) == null ? 
0 - : Array.getLength(arrayColumn.getPrev(next))); - log.error().append(name).append("[").append(i).append("] ").append(size) - .append(" -> ").append(prevSize).endl(); + @Override + public void onUpdate(final RowSet added, final RowSet removed, final RowSet modified) { + intSize("ungroup"); + + int newBase = shiftState.getNumShiftBits(); + RowSetBuilderRandom ungroupAdded = RowSetFactory.builderRandom(); + RowSetBuilderRandom ungroupModified = RowSetFactory.builderRandom(); + RowSetBuilderRandom ungroupRemoved = RowSetFactory.builderRandom(); + newBase = evaluateIndex(added, ungroupAdded, newBase); + newBase = evaluateModified(modified, ungroupModified, ungroupAdded, ungroupRemoved, + newBase); + if (newBase > shiftState.getNumShiftBits()) { + rebase(newBase + 1); + } else { + evaluateRemovedIndex(removed, ungroupRemoved); + final RowSet removedRowSet = ungroupRemoved.build(); + final RowSet addedRowSet = ungroupAdded.build(); + result.getRowSet().writableCast().update(addedRowSet, removedRowSet); + final RowSet modifiedRowSet = ungroupModified.build(); + + if (!modifiedRowSet.subsetOf(result.getRowSet())) { + final RowSet missingModifications = + modifiedRowSet.minus(result.getRowSet()); + log.error().append("Result TrackingWritableRowSet: ") + .append(result.getRowSet().toString()) + .endl(); + log.error().append("Missing modifications: ") + .append(missingModifications.toString()).endl(); + log.error().append("Added: ").append(addedRowSet.toString()).endl(); + log.error().append("Modified: ").append(modifiedRowSet.toString()).endl(); + log.error().append("Removed: ").append(removedRowSet.toString()).endl(); + + for (Map.Entry> es : arrayColumns.entrySet()) { + ColumnSource arrayColumn = es.getValue(); + String name = es.getKey(); + + RowSet.Iterator iterator = rowSet.iterator(); + for (int i = 0; i < rowSet.size(); i++) { + final long next = iterator.nextLong(); + int size = (arrayColumn.get(next) == null ? 
0 + : Array.getLength(arrayColumn.get(next))); + int prevSize = (arrayColumn.getPrev(next) == null ? 0 + : Array.getLength(arrayColumn.getPrev(next))); + log.error().append(name).append("[").append(i).append("] ") + .append(size) + .append(" -> ").append(prevSize).endl(); + } } + + for (Map.Entry> es : vectorColumns.entrySet()) { + ColumnSource arrayColumn = es.getValue(); + String name = es.getKey(); + RowSet.Iterator iterator = rowSet.iterator(); + + for (int i = 0; i < rowSet.size(); i++) { + final long next = iterator.nextLong(); + long size = (arrayColumn.get(next) == null ? 0 + : ((Vector) arrayColumn.get(next)).size()); + long prevSize = (arrayColumn.getPrev(next) == null ? 0 + : ((Vector) arrayColumn.getPrev(next)).size()); + log.error().append(name).append("[").append(i).append("] ") + .append(size) + .append(" -> ").append(prevSize).endl(); + } + } + + Assert.assertion(false, "modifiedRowSet.subsetOf(result.build())", + modifiedRowSet, "modifiedRowSet", result.getRowSet(), + "result.build()", + shiftState.getNumShiftBits(), "shiftState.getNumShiftBits()", + newBase, + "newBase"); } - for (Map.Entry> es : vectorColumns.entrySet()) { - ColumnSource arrayColumn = es.getValue(); - String name = es.getKey(); - RowSet.Iterator iterator = rowSet.iterator(); - - for (int i = 0; i < rowSet.size(); i++) { - final long next = iterator.nextLong(); - long size = (arrayColumn.get(next) == null ? 0 - : ((Vector) arrayColumn.get(next)).size()); - long prevSize = (arrayColumn.getPrev(next) == null ? 
0 - : ((Vector) arrayColumn.getPrev(next)).size()); - log.error().append(name).append("[").append(i).append("] ").append(size) - .append(" -> ").append(prevSize).endl(); + for (ColumnSource source : resultMap.values()) { + if (source instanceof UngroupedColumnSource) { + ((UngroupedColumnSource) source).setBase(newBase); } } - Assert.assertion(false, "modifiedRowSet.subsetOf(result.build())", - modifiedRowSet, "modifiedRowSet", result.getRowSet(), "result.build()", - shiftState.getNumShiftBits(), "shiftState.getNumShiftBits()", newBase, - "newBase"); + result.notifyListeners(addedRowSet, removedRowSet, modifiedRowSet); } + } + private void rebase(final int newBase) { + final WritableRowSet newRowSet = getUngroupIndex( + computeSize(getRowSet(), arrayColumns, vectorColumns, nullFill), + RowSetFactory.builderRandom(), newBase, getRowSet()) + .build(); + final TrackingWritableRowSet rowSet = result.getRowSet().writableCast(); + final RowSet added = newRowSet.minus(rowSet); + final RowSet removed = rowSet.minus(newRowSet); + final WritableRowSet modified = newRowSet; + modified.retain(rowSet); + rowSet.update(added, removed); for (ColumnSource source : resultMap.values()) { if (source instanceof UngroupedColumnSource) { ((UngroupedColumnSource) source).setBase(newBase); } } - - result.notifyListeners(addedRowSet, removedRowSet, modifiedRowSet); + shiftState.setNumShiftBitsAndUpdatePrev(newBase); + result.notifyListeners(added, removed, modified); } - } - private void rebase(final int newBase) { - final WritableRowSet newRowSet = getUngroupIndex( - computeSize(getRowSet(), arrayColumns, vectorColumns, nullFill), - RowSetFactory.builderRandom(), newBase, getRowSet()) - .build(); - final TrackingWritableRowSet rowSet = result.getRowSet().writableCast(); - final RowSet added = newRowSet.minus(rowSet); - final RowSet removed = rowSet.minus(newRowSet); - final WritableRowSet modified = newRowSet; - modified.retain(rowSet); - rowSet.update(added, removed); - for (ColumnSource 
source : resultMap.values()) { - if (source instanceof UngroupedColumnSource) { - ((UngroupedColumnSource) source).setBase(newBase); + private int evaluateIndex(final RowSet rowSet, final RowSetBuilderRandom ungroupBuilder, + final int newBase) { + if (rowSet.size() > 0) { + final long[] modifiedSizes = new long[rowSet.intSize("ungroup")]; + final long maxSize = computeMaxSize(rowSet, arrayColumns, vectorColumns, null, + modifiedSizes, nullFill); + final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); + getUngroupIndex(modifiedSizes, ungroupBuilder, shiftState.getNumShiftBits(), + rowSet); + return Math.max(newBase, minBase); } + return newBase; } - shiftState.setNumShiftBitsAndUpdatePrev(newBase); - result.notifyListeners(added, removed, modified); - } - - private int evaluateIndex(final RowSet rowSet, final RowSetBuilderRandom ungroupBuilder, - final int newBase) { - if (rowSet.size() > 0) { - final long[] modifiedSizes = new long[rowSet.intSize("ungroup")]; - final long maxSize = computeMaxSize(rowSet, arrayColumns, vectorColumns, null, - modifiedSizes, nullFill); - final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); - getUngroupIndex(modifiedSizes, ungroupBuilder, shiftState.getNumShiftBits(), - rowSet); - return Math.max(newBase, minBase); - } - return newBase; - } - private void evaluateRemovedIndex(final RowSet rowSet, - final RowSetBuilderRandom ungroupBuilder) { - if (rowSet.size() > 0) { - final long[] modifiedSizes = new long[rowSet.intSize("ungroup")]; - computePrevSize(rowSet, arrayColumns, vectorColumns, modifiedSizes, nullFill); - getUngroupIndex(modifiedSizes, ungroupBuilder, shiftState.getNumShiftBits(), - rowSet); + private void evaluateRemovedIndex(final RowSet rowSet, + final RowSetBuilderRandom ungroupBuilder) { + if (rowSet.size() > 0) { + final long[] modifiedSizes = new long[rowSet.intSize("ungroup")]; + computePrevSize(rowSet, arrayColumns, vectorColumns, modifiedSizes, nullFill); + getUngroupIndex(modifiedSizes, 
ungroupBuilder, shiftState.getNumShiftBits(), + rowSet); + } } - } - private int evaluateModified(final RowSet rowSet, - final RowSetBuilderRandom modifyBuilder, - final RowSetBuilderRandom addedBuilded, - final RowSetBuilderRandom removedBuilder, - final int newBase) { - if (rowSet.size() > 0) { - final long maxSize = computeModifiedIndicesAndMaxSize(rowSet, arrayColumns, - vectorColumns, null, modifyBuilder, addedBuilded, removedBuilder, - shiftState.getNumShiftBits(), nullFill); - final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); - return Math.max(newBase, minBase); + private int evaluateModified(final RowSet rowSet, + final RowSetBuilderRandom modifyBuilder, + final RowSetBuilderRandom addedBuilded, + final RowSetBuilderRandom removedBuilder, + final int newBase) { + if (rowSet.size() > 0) { + final long maxSize = computeModifiedIndicesAndMaxSize(rowSet, arrayColumns, + vectorColumns, null, modifyBuilder, addedBuilded, removedBuilder, + shiftState.getNumShiftBits(), nullFill); + final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); + return Math.max(newBase, minBase); + } + return newBase; } - return newBase; - } - }); - } - return result; - }); + }); + } + return result; + }); + } } private long computeModifiedIndicesAndMaxSize(RowSet rowSet, Map> arrayColumns, @@ -3088,27 +3211,30 @@ private RowSetBuilderRandom getUngroupIndex( @Override public Table selectDistinct(Collection columns) { - return QueryPerformanceRecorder.withNugget("selectDistinct(" + columns + ")", - sizeForInstrumentation(), - () -> { - final Collection columnNames = ColumnName.cast(columns).orElse(null); - if (columnNames == null) { - return view(columns).selectDistinct(); - } - final MemoizedOperationKey aggKey = - MemoizedOperationKey.aggBy(Collections.emptyList(), false, null, columnNames); - return memoizeResult(aggKey, () -> { - final QueryTable result = - aggNoMemo(AggregationProcessor.forSelectDistinct(), false, null, columnNames); - if (isAddOnly()) { - 
result.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, true); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget("selectDistinct(" + columns + ")", + sizeForInstrumentation(), + () -> { + final Collection columnNames = ColumnName.cast(columns).orElse(null); + if (columnNames == null) { + return view(columns).selectDistinct(); } - if (isAppendOnly()) { - result.setAttribute(Table.APPEND_ONLY_TABLE_ATTRIBUTE, true); - } - return result; + final MemoizedOperationKey aggKey = + MemoizedOperationKey.aggBy(Collections.emptyList(), false, null, columnNames); + return memoizeResult(aggKey, () -> { + final QueryTable result = + aggNoMemo(AggregationProcessor.forSelectDistinct(), false, null, columnNames); + if (isAddOnly()) { + result.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, true); + } + if (isAppendOnly()) { + result.setAttribute(Table.APPEND_ONLY_TABLE_ATTRIBUTE, true); + } + return result; + }); }); - }); + } } /** @@ -3145,7 +3271,10 @@ public void propagateFlatness(QueryTable result) { */ @Override public QueryTable getSubTable(@NotNull final TrackingRowSet rowSet) { - return getSubTable(rowSet, null, null, CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return getSubTable(rowSet, null, null, CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + } } /** @@ -3174,14 +3303,17 @@ public QueryTable getSubTable( @Nullable final ModifiedColumnSet resultModifiedColumnSet, @Nullable final Map attributes, @NotNull final Object... parents) { - // There is no checkInitiateOperation check here, because partitionBy calls it internally and the RowSet - // results are not updated internally, but rather externally. 
- final QueryTable result = new QueryTable(definition, rowSet, columns, resultModifiedColumnSet, attributes); - for (final Object parent : parents) { - result.addParentReference(parent); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + // There is no checkInitiateOperation check here, because partitionBy calls it internally and the RowSet + // results are not updated internally, but rather externally. + final QueryTable result = new QueryTable(definition, rowSet, columns, resultModifiedColumnSet, attributes); + for (final Object parent : parents) { + result.addParentReference(parent); + } + result.setLastNotificationStep(getLastNotificationStep()); + return result; } - result.setLastNotificationStep(getLastNotificationStep()); - return result; } /** @@ -3191,11 +3323,17 @@ public QueryTable getSubTable( */ @Override public QueryTable copy() { - return copy(StandardOptions.COPY_ALL); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return copy(StandardOptions.COPY_ALL); + } } public QueryTable copy(Predicate shouldCopy) { - return copy(definition, shouldCopy); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return copy(definition, shouldCopy); + } } private enum StandardOptions implements Predicate { @@ -3214,28 +3352,31 @@ public boolean test(String attributeName) { } public QueryTable copy(TableDefinition definition, Predicate shouldCopy) { - return QueryPerformanceRecorder.withNugget("copy()", sizeForInstrumentation(), () -> { - final Mutable result = new MutableObject<>(); - - final SwapListener swapListener = createSwapListenerIfRefreshing(SwapListener::new); - initializeWithSnapshot("copy", swapListener, (usePrev, beforeClockValue) 
-> { - final QueryTable resultTable = new CopiedTable(definition, this); - propagateFlatness(resultTable); - if (shouldCopy != StandardOptions.COPY_NONE) { - copyAttributes(resultTable, shouldCopy); - } - if (swapListener != null) { - final ListenerImpl listener = new ListenerImpl("copy()", this, resultTable); - swapListener.setListenerAndResult(listener, resultTable); - } + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget("copy()", sizeForInstrumentation(), () -> { + final Mutable result = new MutableObject<>(); + + final SwapListener swapListener = createSwapListenerIfRefreshing(SwapListener::new); + initializeWithSnapshot("copy", swapListener, (usePrev, beforeClockValue) -> { + final QueryTable resultTable = new CopiedTable(definition, this); + propagateFlatness(resultTable); + if (shouldCopy != StandardOptions.COPY_NONE) { + copyAttributes(resultTable, shouldCopy); + } + if (swapListener != null) { + final ListenerImpl listener = new ListenerImpl("copy()", this, resultTable); + swapListener.setListenerAndResult(listener, resultTable); + } - result.setValue(resultTable); + result.setValue(resultTable); - return true; - }); + return true; + }); - return result.getValue(); - }); + return result.getValue(); + }); + } } @VisibleForTesting @@ -3279,8 +3420,11 @@ public R memoizeResult(MemoizedOperationKey memoKey, Supplier operation) public Table updateBy(@NotNull final UpdateByControl control, @NotNull final Collection ops, @NotNull final Collection byColumns) { - return QueryPerformanceRecorder.withNugget("updateBy()", sizeForInstrumentation(), - () -> UpdateBy.updateBy(this, ops, byColumns, control)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return QueryPerformanceRecorder.withNugget("updateBy()", 
sizeForInstrumentation(), + () -> UpdateBy.updateBy(this, ops, byColumns, control)); + } } /** @@ -3430,13 +3574,13 @@ private void checkInitiateOperation() { public static void checkInitiateOperation(@NotNull final Table table) { if (table.isRefreshing()) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + table.getUpdateGraph().checkInitiateSerialTableOperation(); } } public static void checkInitiateBinaryOperation(@NotNull final Table first, @NotNull final Table second) { if (first.isRefreshing() || second.isRefreshing()) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + first.getUpdateGraph(second).checkInitiateSerialTableOperation(); } } @@ -3461,7 +3605,10 @@ public R apply(@NotNull final Function function) { } public Table wouldMatch(WouldMatchPair... matchers) { - return getResult(new WouldMatchOperation(this, matchers)); + final UpdateGraph updateGraph = getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return getResult(new WouldMatchOperation(this, matchers)); + } } public static SafeCloseable disableParallelWhereForThread() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/ReverseOperation.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/ReverseOperation.java index 398508f72ef..007e9c0b0cb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/ReverseOperation.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/ReverseOperation.java @@ -4,6 +4,7 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.impl.rsp.RspArray; @@ -11,7 +12,6 @@ import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.TableUpdate; import 
io.deephaven.engine.table.TableUpdateListener; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.table.impl.sources.ReversedColumnSource; import io.deephaven.util.annotations.VisibleForTesting; import org.jetbrains.annotations.NotNull; @@ -195,7 +195,7 @@ private void onUpdate(final TableUpdate upstream) { } // Update pivot logic. - lastPivotPointChange = LogicalClock.DEFAULT.currentStep(); + lastPivotPointChange = parent.getUpdateGraph().clock().currentStep(); prevPivotPoint = pivotPoint; pivotPoint += newShift; } else { @@ -243,8 +243,10 @@ private static long computePivot(final long parentLastRowKey) { } private long getPrevPivotPoint() { - if ((prevPivotPoint != pivotPoint) && (LogicalClock.DEFAULT.currentStep() != lastPivotPointChange)) { - prevPivotPoint = pivotPoint; + if ((prevPivotPoint != pivotPoint)) { + if (parent.getUpdateGraph().clock().currentStep() != lastPivotPointChange) { + prevPivotPoint = pivotPoint; + } } return prevPivotPoint; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SelectOrUpdateListener.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SelectOrUpdateListener.java index 891a7166801..eb9b62849aa 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SelectOrUpdateListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SelectOrUpdateListener.java @@ -10,10 +10,9 @@ import io.deephaven.engine.table.impl.perf.BasePerformanceEntry; import io.deephaven.engine.table.impl.select.analyzers.SelectAndViewAnalyzer; import io.deephaven.engine.updategraph.TerminalNotification; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.util.ImmediateJobScheduler; import io.deephaven.engine.table.impl.util.JobScheduler; -import io.deephaven.engine.table.impl.util.UpdateGraphProcessorJobScheduler; +import io.deephaven.engine.table.impl.util.UpdateGraphJobScheduler; import java.util.BitSet; import 
java.util.Map; @@ -59,7 +58,7 @@ class SelectOrUpdateListener extends BaseTable.ListenerImpl { this.enableParallelUpdate = (QueryTable.FORCE_PARALLEL_SELECT_AND_UPDATE || (QueryTable.ENABLE_PARALLEL_SELECT_AND_UPDATE - && UpdateGraphProcessor.DEFAULT.getUpdateThreads() > 1)) + && getUpdateGraph().parallelismFactor() > 1)) && analyzer.allowCrossColumnParallelization(); analyzer.setAllNewColumns(allNewColumns); } @@ -86,7 +85,7 @@ public void onUpdate(final TableUpdate upstream) { JobScheduler jobScheduler; if (enableParallelUpdate) { - jobScheduler = new UpdateGraphProcessorJobScheduler(); + jobScheduler = new UpdateGraphJobScheduler(getUpdateGraph()); } else { jobScheduler = ImmediateJobScheduler.INSTANCE; } @@ -131,7 +130,7 @@ private void completionRoutine(TableUpdate upstream, JobScheduler jobScheduler, // if the entry exists, then we install a terminal notification so that we don't lose the performance from // this execution if (accumulated != null) { - UpdateGraphProcessor.DEFAULT.addNotification(new TerminalNotification() { + getUpdateGraph().addNotification(new TerminalNotification() { @Override public void run() { synchronized (accumulated) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SimpleSourceTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SimpleSourceTable.java index 5c8ffb03d11..b6065c15591 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SimpleSourceTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SimpleSourceTable.java @@ -9,9 +9,6 @@ import io.deephaven.engine.table.impl.locations.TableLocationProvider; import io.deephaven.engine.table.impl.select.SelectColumn; -import java.util.Map; -import java.util.Set; - /** * Simple source table with no partitioning support. 
*/ diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java index cd2dde47469..4ce355903cb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SourcePartitionedTable.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.RowSetShiftData; @@ -10,7 +11,8 @@ import io.deephaven.engine.table.*; import io.deephaven.engine.table.impl.partitioned.PartitionedTableImpl; import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.NotificationQueue; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.updategraph.UpdateSourceCombiner; import io.deephaven.engine.table.impl.locations.*; import io.deephaven.engine.table.impl.locations.impl.SingleTableLocationProvider; @@ -120,7 +122,7 @@ private UnderlyingTableMaintainer( final boolean needToRefreshLocations = refreshLocations && tableLocationProvider.supportsSubscriptions(); if (needToRefreshLocations || refreshSizes) { result.setRefreshing(true); - refreshCombiner = new UpdateSourceCombiner(); + refreshCombiner = new UpdateSourceCombiner(result.getUpdateGraph()); result.addParentReference(refreshCombiner); } else { refreshCombiner = null; @@ -158,7 +160,8 @@ protected void instrumentedRefresh() { if (result.isRefreshing()) { // noinspection ConstantConditions - UpdateGraphProcessor.DEFAULT.addSource(refreshCombiner); + UpdateGraph updateGraph = result.getUpdateGraph(); + updateGraph.addSource(refreshCombiner); } } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/SourceTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SourceTable.java index 92e65f783e4..c403a8e358d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SourceTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SourceTable.java @@ -173,7 +173,7 @@ private void initializeLocationSizes() { return; } rowSet.initializePreviousValue(); - final long currentClockValue = LogicalClock.DEFAULT.currentValue(); + final long currentClockValue = getUpdateGraph().clock().currentValue(); setLastNotificationStep(LogicalClock.getState(currentClockValue) == LogicalClock.State.Updating ? LogicalClock.getStep(currentClockValue) - 1 : LogicalClock.getStep(currentClockValue)); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java index 91fbae9470e..478c69a3aa9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SparseSelect.java @@ -16,7 +16,6 @@ import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ObjectSparseArraySource; import io.deephaven.engine.table.impl.sources.SparseArrayColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.util.SafeCloseableArray; import io.deephaven.util.SafeCloseablePair; import io.deephaven.util.thread.NamingThreadFactory; @@ -136,7 +135,7 @@ private static Table sparseSelect(QueryTable source, String[] preserveColumns, S return QueryPerformanceRecorder.withNugget("sparseSelect(" + Arrays.toString(columnNames) + ")", source.sizeForInstrumentation(), () -> { if (source.isRefreshing()) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + source.getUpdateGraph().checkInitiateSerialTableOperation(); } final Map> 
resultColumns = new LinkedHashMap<>(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListener.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListener.java index b4fe5e5c9af..8cf43d01da8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListener.java @@ -8,6 +8,9 @@ import io.deephaven.base.reference.SwappableDelegatingReference; import io.deephaven.base.reference.WeakSimpleReference; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableUpdate; import io.deephaven.engine.table.TableUpdateListener; @@ -210,7 +213,7 @@ public LogOutput append(final LogOutput logOutput) { public void run() { log.info().append("SwapListener {source=").append(System.identityHashCode(sourceTable)) .append(" swap=").append(System.identityHashCode(SwapListener.this)) - .append(", clock=").append(LogicalClock.DEFAULT.currentStep()) + .append(", clock=").append(ExecutionContext.getContext().getUpdateGraph().clock().currentStep()) .append("} Firing notification") .endl(); notification.run(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListenerEx.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListenerEx.java index dadc6a48d75..eaed0416a18 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListenerEx.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/SwapListenerEx.java @@ -1,6 +1,6 @@ package io.deephaven.engine.table.impl; -import io.deephaven.engine.liveness.LivenessReferent; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.TableUpdateListener; import 
io.deephaven.engine.table.impl.remote.ConstructSnapshot; import io.deephaven.engine.updategraph.LogicalClock; @@ -58,7 +58,8 @@ public synchronized Boolean startWithExtra(final long beforeClockValue) { } else { WaitNotification.waitForSatisfaction(beforeStep, extra); extraLastNotificationStep = extra.getLastNotificationStep(); - result = LogicalClock.DEFAULT.currentStep() == beforeStep ? false : null; + result = ExecutionContext.getContext().getUpdateGraph().clock().currentStep() == beforeStep ? false + : null; } } else if (extraUpdatedOnThisCycle) { if (sourceTable.satisfied(beforeStep)) { @@ -66,7 +67,8 @@ public synchronized Boolean startWithExtra(final long beforeClockValue) { } else { WaitNotification.waitForSatisfaction(beforeStep, sourceTable); lastNotificationStep = sourceTable.getLastNotificationStep(); - result = LogicalClock.DEFAULT.currentStep() == beforeStep ? false : null; + result = ExecutionContext.getContext().getUpdateGraph().clock().currentStep() == beforeStep ? false + : null; } } else { result = true; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableAdapter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableAdapter.java index e7608ae104a..e659efc6acb 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/TableAdapter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/TableAdapter.java @@ -21,6 +21,7 @@ import io.deephaven.engine.table.*; import io.deephaven.engine.table.hierarchical.RollupTable; import io.deephaven.engine.table.hierarchical.TreeTable; +import io.deephaven.engine.updategraph.UpdateGraph; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -63,6 +64,11 @@ default String getDescription() { return throwUnsupported(); } + @Override + default UpdateGraph getUpdateGraph() { + return throwUnsupported(); + } + @Override default boolean isRefreshing() { return throwUnsupported(); diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/TimeTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/TimeTable.java index d959651b8fa..91a8774be25 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/TimeTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/TimeTable.java @@ -9,6 +9,7 @@ import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; @@ -20,7 +21,6 @@ import io.deephaven.engine.table.impl.perf.PerformanceEntry; import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; import io.deephaven.engine.table.impl.sources.FillUnordered; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.UpdateSourceRegistrar; import io.deephaven.engine.util.TableTools; import io.deephaven.function.Numeric; @@ -49,12 +49,13 @@ * * To create a TimeTable, you should use the {@link TableTools#timeTable} family of methods. * - * @implNote The constructor publishes {@code this} to the {@link UpdateGraphProcessor} and thus cannot be subclassed. + * @implNote The constructor publishes {@code this} to the {@link UpdateSourceRegistrar} and thus cannot be subclassed. 
*/ public final class TimeTable extends QueryTable implements Runnable { public static class Builder { - private UpdateSourceRegistrar registrar = UpdateGraphProcessor.DEFAULT; + private UpdateSourceRegistrar registrar = ExecutionContext.getContext().getUpdateGraph(); + private Clock clock; private Instant startTime; private long period; @@ -112,6 +113,7 @@ public static Builder newBuilder() { private final Clock clock; private final PerformanceEntry entry; private final boolean isBlinkTable; + private final UpdateSourceRegistrar registrar; public TimeTable( UpdateSourceRegistrar registrar, @@ -120,6 +122,7 @@ public TimeTable( long period, boolean isBlinkTable) { super(RowSetFactory.empty().toTracking(), initColumn(startTime, period)); + this.registrar = registrar; this.isBlinkTable = isBlinkTable; final String name = isBlinkTable ? "TimeTableBlink" : "TimeTable"; this.entry = UpdatePerformanceTracker.getInstance().getEntry(name + "(" + startTime + "," + period + ")"); @@ -191,7 +194,7 @@ private void refresh(final boolean notifyListeners) { @Override protected void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + registrar.removeSource(this); } private static final class SyntheticInstantSource extends AbstractColumnSource implements diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/UncoalescedTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/UncoalescedTable.java index f34ef7159a7..4dd8f5e6b1f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/UncoalescedTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/UncoalescedTable.java @@ -18,6 +18,7 @@ import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.Liveness; import io.deephaven.engine.primitive.iterator.*; import 
io.deephaven.engine.rowset.TrackingRowSet; @@ -27,6 +28,7 @@ import io.deephaven.engine.table.impl.updateby.UpdateBy; import io.deephaven.api.util.ConcurrentMethod; import io.deephaven.util.QueryConstants; +import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -64,15 +66,17 @@ public UncoalescedTable(@NotNull final TableDefinition definition, @NotNull fina protected abstract Table doCoalesce(); public final Table coalesce() { - Table localCoalesced; - if (Liveness.verifyCachedObjectForReuse(localCoalesced = coalesced)) { - return localCoalesced; - } - synchronized (coalescingLock) { + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + Table localCoalesced; if (Liveness.verifyCachedObjectForReuse(localCoalesced = coalesced)) { return localCoalesced; } - return coalesced = doCoalesce(); + synchronized (coalescingLock) { + if (Liveness.verifyCachedObjectForReuse(localCoalesced = coalesced)) { + return localCoalesced; + } + return coalesced = doCoalesce(); + } } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdatableTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdatableTable.java index cc70e237520..fdbea26f01f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdatableTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdatableTable.java @@ -7,7 +7,6 @@ import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.TableUpdate; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.ColumnSource; import gnu.trove.impl.Constants; import gnu.trove.set.TLongSet; @@ -148,6 +147,6 @@ protected void doNotifyListeners(TableUpdate update) { @Override public void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + 
updateGraph.removeSource(this); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdateSourceQueryTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdateSourceQueryTable.java index 90ea02302bc..129e8f53b87 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdateSourceQueryTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/UpdateSourceQueryTable.java @@ -7,13 +7,13 @@ import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.TrackingWritableRowSet; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.table.ColumnSource; import java.util.Map; /** - * A {@link QueryTable} that acts as an update source within the {@link UpdateGraphProcessor}, with {@link RowSet} + * A {@link QueryTable} that acts as an update source within the {@link PeriodicUpdateGraph}, with {@link RowSet} * changes queued externally by a single producer. 
*/ public class UpdateSourceQueryTable extends QueryTable implements Runnable { @@ -50,6 +50,6 @@ public synchronized void addRowKeyRange(final long firstRowKey, final long lastR @Override public void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/WhereListener.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/WhereListener.java index 3b0399b8258..b7dd52d4fe5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/WhereListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/WhereListener.java @@ -2,6 +2,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.WritableRowSet; @@ -10,9 +11,7 @@ import io.deephaven.engine.table.impl.perf.BasePerformanceEntry; import io.deephaven.engine.table.impl.select.DynamicWhereFilter; import io.deephaven.engine.table.impl.select.WhereFilter; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.NotificationQueue; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.io.logger.Logger; import java.util.*; @@ -68,18 +67,21 @@ class WhereListener extends MergedListener { : sourceTable.newModifiedColumnSet( filterColumnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); - if (UpdateGraphProcessor.DEFAULT.getUpdateThreads() > 1) { + if (getUpdateGraph().parallelismFactor() > 1) { minimumThreadSize = QueryTable.PARALLEL_WHERE_ROWS_PER_SEGMENT; } else { minimumThreadSize = Long.MAX_VALUE; } - segmentCount = QueryTable.PARALLEL_WHERE_SEGMENTS <= 0 ? 
UpdateGraphProcessor.DEFAULT.getUpdateThreads() - : QueryTable.PARALLEL_WHERE_SEGMENTS; + if (QueryTable.PARALLEL_WHERE_SEGMENTS <= 0) { + segmentCount = getUpdateGraph().parallelismFactor(); + } else { + segmentCount = QueryTable.PARALLEL_WHERE_SEGMENTS; + } } @Override public void process() { - initialNotificationStep = LogicalClock.DEFAULT.currentStep(); + initialNotificationStep = getUpdateGraph().clock().currentStep(); if (result.refilterRequested()) { final TableUpdate update = recorder != null ? recorder.getUpdate() : null; @@ -185,7 +187,7 @@ ListenerFilterExecution makeFilterExecution(RowSet refilter) { } void setFinalExecutionStep() { - finalNotificationStep = LogicalClock.DEFAULT.currentStep(); + finalNotificationStep = getUpdateGraph().clock().currentStep(); } ListenerFilterExecution makeFilterExecution() { @@ -241,8 +243,8 @@ ListenerFilterExecution makeChild( @Override void enqueueSubFilters(List subFilters, CombinationNotification combinationNotification) { - UpdateGraphProcessor.DEFAULT.addNotifications(subFilters); - UpdateGraphProcessor.DEFAULT.addNotification(combinationNotification); + getUpdateGraph().addNotifications(subFilters); + getUpdateGraph().addNotification(combinationNotification); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/ZeroKeyCrossJoinShiftState.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/ZeroKeyCrossJoinShiftState.java index 678662ebe40..e0e2586eec9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/ZeroKeyCrossJoinShiftState.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/ZeroKeyCrossJoinShiftState.java @@ -1,7 +1,7 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; -import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.engine.context.ExecutionContext; public class ZeroKeyCrossJoinShiftState extends CrossJoinShiftState { private boolean rightEmpty; @@ -20,7 +20,7 @@ void 
startTrackingPrevious() { void setRightEmpty(boolean rightEmpty) { if (isTrackingPrev) { this.prevRightEmpty = this.rightEmpty; - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = ExecutionContext.getContext().getUpdateGraph().clock().currentStep(); Assert.lt(emptyChangeStep, "emptyChangeStep", currentStep, "currentStep"); this.emptyChangeStep = currentStep; } @@ -32,7 +32,8 @@ public boolean rightEmpty() { } public boolean rightEmptyPrev() { - if (emptyChangeStep != -1 && emptyChangeStep == LogicalClock.DEFAULT.currentStep()) { + if (emptyChangeStep != -1 + && emptyChangeStep == ExecutionContext.getContext().getUpdateGraph().clock().currentStep()) { return prevRightEmpty; } return rightEmpty; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java index f56a62e4e20..f7d3764223c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ChunkedOperatorAggregationHelper.java @@ -14,6 +14,7 @@ import io.deephaven.chunk.attributes.ChunkPositions; import io.deephaven.chunk.attributes.Values; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.*; import io.deephaven.engine.rowset.*; @@ -1619,6 +1620,7 @@ private static OperatorAggregationStateManager initialKeyTableAddition( @NotNull final AggregationContext ac, @NotNull final MutableInt outputPosition, @NotNull final Supplier stateManagerSupplier) { + // This logic is duplicative of the logic in the main aggregation function, but it's hard to consolidate // further. A better strategy might be to do a selectDistinct first, but that would result in more hash table // inserts. 
@@ -1635,16 +1637,20 @@ private static OperatorAggregationStateManager initialKeyTableAddition( final OperatorAggregationStateManager stateManager; if (initialKeys.isRefreshing()) { - final MutableObject stateManagerHolder = new MutableObject<>(); - ConstructSnapshot.callDataSnapshotFunction( - "InitialKeyTableSnapshot-" + System.identityHashCode(initialKeys) + ": ", - ConstructSnapshot.makeSnapshotControl(false, true, (NotificationStepSource) initialKeys), - (final boolean usePrev, final long beforeClockValue) -> { - stateManagerHolder.setValue(makeInitializedStateManager(initialKeys, reinterpretedKeySources, - ac, outputPosition, stateManagerSupplier, useGroupingAllowed, usePrev)); - return true; - }); - stateManager = stateManagerHolder.getValue(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph( + initialKeys.getUpdateGraph()).open()) { + final MutableObject stateManagerHolder = new MutableObject<>(); + ConstructSnapshot.callDataSnapshotFunction( + "InitialKeyTableSnapshot-" + System.identityHashCode(initialKeys) + ": ", + ConstructSnapshot.makeSnapshotControl(false, true, (NotificationStepSource) initialKeys), + (final boolean usePrev, final long beforeClockValue) -> { + stateManagerHolder.setValue(makeInitializedStateManager( + initialKeys, reinterpretedKeySources, ac, outputPosition, stateManagerSupplier, + useGroupingAllowed, usePrev)); + return true; + }); + stateManager = stateManagerHolder.getValue(); + } } else { stateManager = makeInitializedStateManager(initialKeys, reinterpretedKeySources, ac, outputPosition, stateManagerSupplier, useGroupingAllowed, false); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java index 5ccfced74b2..95f586980a1 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteChunkedCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -276,7 +277,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, ByteChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + ByteChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java index 1e82d51feb6..d98933a6e9f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ByteRollupCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -524,7 +525,9 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ByteRollupCountDistinctOperator::flushPrevious); + prevFlusher = new 
UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + ByteRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java index 0994080c2f1..52170bf6c7f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharChunkedCountDistinctOperator.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -271,7 +272,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, CharChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + CharChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java index 9f1ba28b445..1e0349aa201 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/CharRollupCountDistinctOperator.java @@ 
-3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -519,7 +520,9 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, CharRollupCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + CharRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java index d8f40746860..ff4d8558df3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleChunkedCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -276,7 +277,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, DoubleChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + DoubleChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff 
--git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java index 9f9e4b808bb..05f6bb69edc 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/DoubleRollupCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -524,7 +525,9 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, DoubleRollupCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + DoubleRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java index cfe8889a39c..6a323f0b8cf 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatChunkedCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import 
io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -276,7 +277,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, FloatChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + FloatChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java index 029a0dfd8d9..deef2c866b9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/FloatRollupCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -524,7 +525,9 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, FloatRollupCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + FloatRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java index 57a30a63ddd..a223b21d742 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntChunkedCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -276,7 +277,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, IntChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + IntChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java index 5d9061a7e56..7f229440c83 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/IntRollupCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import 
io.deephaven.engine.rowset.RowSetFactory; @@ -524,7 +525,9 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, IntRollupCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + IntRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java index f4afe9c634d..a0eda766c2d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongChunkedCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -276,7 +277,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, LongChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + LongChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java index 348dfad2845..1cae2d93fd3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/LongRollupCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -524,7 +525,9 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, LongRollupCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + LongRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java index f6f0732e80e..2dbfc0def7d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectChunkedCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import 
io.deephaven.engine.rowset.RowSetFactory; @@ -277,7 +278,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, ObjectChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + ObjectChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java index e7e1b9e6a1f..ef8777bddde 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ObjectRollupCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -525,7 +526,9 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ObjectRollupCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + ObjectRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java index 39e66d4af62..c2a11387ac8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortChunkedCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -276,7 +277,9 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, ShortChunkedCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + ShortChunkedCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java index 9090f80ff5b..c1d2790aac4 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/count/ShortRollupCountDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.count; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -524,7 +525,9 @@ public void startTrackingPrevValues() { throw new 
IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ShortRollupCountDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + ShortRollupCountDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); resultColumn.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteChunkedDistinctOperator.java index e3ec5abcd29..a405e47c3a2 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteChunkedDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteChunkedDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -277,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ByteChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ByteChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteRollupDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteRollupDistinctOperator.java index c388be5f9aa..2d7a566edbd 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ByteRollupDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -497,7 +498,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ByteRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ByteRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharChunkedDistinctOperator.java index 85d06b5976b..fc8bc55d01a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharChunkedDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharChunkedDistinctOperator.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -272,7 +273,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new 
UpdateCommitter<>(this, CharChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), CharChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharRollupDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharRollupDistinctOperator.java index f5f5e0995d1..99d4fa9d0f6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/CharRollupDistinctOperator.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -492,7 +493,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, CharRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), CharRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleChunkedDistinctOperator.java index 6fdbb64119e..37cfdc67d61 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleChunkedDistinctOperator.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleChunkedDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -277,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, DoubleChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), DoubleChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleRollupDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleRollupDistinctOperator.java index 845e5e5627d..8a2d0be4b63 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/DoubleRollupDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -497,7 +498,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, DoubleRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), 
DoubleRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatChunkedDistinctOperator.java index 795c5011cf3..6ac6043d2ae 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatChunkedDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatChunkedDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -277,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, FloatChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), FloatChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatRollupDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatRollupDistinctOperator.java index 237ba7ccc1e..ebf48e73dc1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/FloatRollupDistinctOperator.java @@ -8,6 +8,7 @@ */ package 
io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -497,7 +498,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, FloatRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), FloatRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntChunkedDistinctOperator.java index 401b344b2dc..3d2f572a016 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntChunkedDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntChunkedDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -277,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, IntChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), IntChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntRollupDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntRollupDistinctOperator.java index 629cf48262e..988346fa715 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/IntRollupDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -497,7 +498,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, IntRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), IntRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongChunkedDistinctOperator.java index 995cac884ae..6d8980f62af 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongChunkedDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongChunkedDistinctOperator.java @@ -13,6 +13,7 @@ import io.deephaven.engine.table.impl.sources.BoxedColumnSource; import io.deephaven.engine.table.impl.by.ssmcountdistinct.InstantSsmSourceWrapper; +import 
io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -287,7 +288,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, LongChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), LongChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongRollupDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongRollupDistinctOperator.java index 3316241e980..31b9adb47e9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/LongRollupDistinctOperator.java @@ -13,6 +13,7 @@ import io.deephaven.engine.table.impl.sources.BoxedColumnSource; import io.deephaven.engine.table.impl.by.ssmcountdistinct.InstantSsmSourceWrapper; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -507,7 +508,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, LongRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), LongRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectChunkedDistinctOperator.java index 0aa41659cf1..40f41fb3c20 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectChunkedDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectChunkedDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -278,7 +279,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ObjectChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ObjectChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectRollupDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectRollupDistinctOperator.java index f9c8087032c..c6a12079b80 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ObjectRollupDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import 
io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -498,7 +499,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ObjectRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ObjectRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortChunkedDistinctOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortChunkedDistinctOperator.java index 8e10c890a9c..ae3b0295257 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortChunkedDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortChunkedDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -277,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ShortChunkedDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ShortChunkedDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortRollupDistinctOperator.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortRollupDistinctOperator.java index f678f0612a7..6dbcb55b6cf 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortRollupDistinctOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/distinct/ShortRollupDistinctOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.distinct; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -497,7 +498,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ShortRollupDistinctOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ShortRollupDistinctOperator::flushPrevious); touchedStates = RowSetFactory.empty(); internalResult.startTrackingPrevValues(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteChunkedUniqueOperator.java index 5bccb39fd56..1367ac25d85 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteChunkedUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -286,7 +287,7 @@ public void 
startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, ByteChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ByteChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteRollupUniqueOperator.java index e7f58753af3..8bc707960c8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteRollupUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ByteRollupUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -523,7 +524,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ByteRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ByteRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharChunkedUniqueOperator.java index 7393b46bf3e..f3e835cd17d 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharChunkedUniqueOperator.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -281,7 +282,7 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, CharChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), CharChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharRollupUniqueOperator.java index d79717553e6..a6bfddddc22 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharRollupUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/CharRollupUniqueOperator.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -518,7 +519,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, CharRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, 
ExecutionContext.getContext().getUpdateGraph(), CharRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleChunkedUniqueOperator.java index d913d1c74d8..567ee5a8afa 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleChunkedUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -286,7 +287,7 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, DoubleChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), DoubleChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleRollupUniqueOperator.java index de0afe47888..021ae65fab1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleRollupUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/DoubleRollupUniqueOperator.java @@ -8,6 +8,7 @@ */ package 
io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -523,7 +524,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, DoubleRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), DoubleRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatChunkedUniqueOperator.java index 956d8edc574..0ee147e8aa5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatChunkedUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -286,7 +287,7 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, FloatChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), FloatChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatRollupUniqueOperator.java index 54ca90be7e4..521d1a93631 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatRollupUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/FloatRollupUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -523,7 +524,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, FloatRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), FloatRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntChunkedUniqueOperator.java index 6436bfd8b8e..22aa4071b76 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntChunkedUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; 
import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -286,7 +287,7 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, IntChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), IntChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntRollupUniqueOperator.java index f91f3059527..bd1bd06bfab 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntRollupUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/IntRollupUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -523,7 +524,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, IntRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), IntRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongChunkedUniqueOperator.java index 
878a699c338..fac58f90a05 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongChunkedUniqueOperator.java @@ -13,6 +13,7 @@ import io.deephaven.engine.table.impl.sources.BoxedColumnSource; import io.deephaven.engine.table.impl.by.ssmcountdistinct.InstantSsmSourceWrapper; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -296,7 +297,7 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, LongChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), LongChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongRollupUniqueOperator.java index 68befeee289..8a8c9a28698 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongRollupUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/LongRollupUniqueOperator.java @@ -13,6 +13,7 @@ import io.deephaven.engine.table.impl.sources.BoxedColumnSource; import io.deephaven.engine.table.impl.by.ssmcountdistinct.InstantSsmSourceWrapper; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -533,7 +534,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues 
must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, LongRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), LongRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectChunkedUniqueOperator.java index f26316b37ab..01da2f46bd8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectChunkedUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -287,7 +288,7 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, ObjectChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ObjectChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectRollupUniqueOperator.java index b642795d38d..99cc4c7cba3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectRollupUniqueOperator.java +++ 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ObjectRollupUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -524,7 +525,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ObjectRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ObjectRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortChunkedUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortChunkedUniqueOperator.java index 67a3fa32963..db1c2c441c6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortChunkedUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortChunkedUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -286,7 +287,7 @@ public void startTrackingPrevValues() { } ssms.startTrackingPrevValues(); - prevFlusher = new UpdateCommitter<>(this, ShortChunkedUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), 
ShortChunkedUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortRollupUniqueOperator.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortRollupUniqueOperator.java index 7fc507843e5..0a57859d2f8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortRollupUniqueOperator.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/by/ssmcountdistinct/unique/ShortRollupUniqueOperator.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.by.ssmcountdistinct.unique; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -523,7 +524,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("startTrackingPrevValues must only be called once"); } - prevFlusher = new UpdateCommitter<>(this, ShortRollupUniqueOperator::flushPrevious); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), ShortRollupUniqueOperator::flushPrevious); touchedStates = RowSetFactory.empty(); ssms.startTrackingPrevValues(); internalResult.startTrackingPrevValues(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/BaseNodeOperationsRecorder.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/BaseNodeOperationsRecorder.java index 15e54024ba3..7e3da401317 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/BaseNodeOperationsRecorder.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/BaseNodeOperationsRecorder.java @@ -2,11 +2,13 @@ import io.deephaven.api.Selectable; import io.deephaven.api.SortColumn; +import io.deephaven.engine.context.ExecutionContext; 
import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; -import io.deephaven.engine.table.hierarchical.*; +import io.deephaven.engine.table.hierarchical.RollupTable; +import io.deephaven.engine.table.hierarchical.TreeTable; import io.deephaven.engine.table.impl.AbsoluteSortColumnConventions; import io.deephaven.engine.table.impl.NoSuchColumnException; import io.deephaven.engine.table.impl.QueryTable; @@ -14,6 +16,7 @@ import io.deephaven.engine.table.impl.select.SelectColumn; import io.deephaven.engine.table.impl.select.analyzers.SelectAndViewAnalyzer; import io.deephaven.engine.table.impl.sources.NullValueColumnSource; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.ColumnFormatting; import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; @@ -180,8 +183,10 @@ public TYPE sort(Collection columnsToSortBy) { static abstract class RecordingTableAdapter implements TableAdapter { private final TableDefinition definition; + private final UpdateGraph updateGraph; RecordingTableAdapter(@NotNull final TableDefinition definition) { + this.updateGraph = ExecutionContext.getContext().getUpdateGraph(); this.definition = definition; } @@ -189,6 +194,11 @@ static abstract class RecordingTableAdapter implements TableAdapter { public final TableDefinition getDefinition() { return definition; } + + @Override + public UpdateGraph getUpdateGraph() { + return updateGraph; + } } private static final class FormatRecordingTableAdapter extends RecordingTableAdapter { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/HierarchicalTableImpl.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/HierarchicalTableImpl.java index e3f4021d18b..2f11ad45800 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/HierarchicalTableImpl.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/HierarchicalTableImpl.java @@ -11,6 +11,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Any; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessArtifact; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.rowset.*; @@ -967,18 +968,21 @@ private Collection snapshotKeyTableNodeDirectives( @NotNull final Table keyTable, @Nullable final ColumnName keyTableActionColumn) { if (keyTable.isRefreshing()) { - final MutableObject> rootNodeInfoHolder = new MutableObject<>(); - // NB: This snapshot need not be notification-aware. If the key table ticks so be it, as long as we - // extracted a consistent view of its contents. - final SnapshotControl keyTableSnapshotControl = - makeSnapshotControl(false, true, (NotificationStepSource) keyTable); - callDataSnapshotFunction(getClass().getSimpleName() + "-keys", keyTableSnapshotControl, - (final boolean usePrev, final long beforeClockValue) -> { - rootNodeInfoHolder.setValue(extractKeyTableNodeDirectives( - keyTable, keyTableActionColumn, usePrev)); - return true; - }); - return rootNodeInfoHolder.getValue(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph( + keyTable.getUpdateGraph()).open()) { + final MutableObject> rootNodeInfoHolder = new MutableObject<>(); + // NB: This snapshot need not be notification-aware. If the key table ticks so be it, as long as we + // extracted a consistent view of its contents. 
+ final SnapshotControl keyTableSnapshotControl = + makeSnapshotControl(false, true, (NotificationStepSource) keyTable); + callDataSnapshotFunction(getClass().getSimpleName() + "-keys", keyTableSnapshotControl, + (final boolean usePrev, final long beforeClockValue) -> { + rootNodeInfoHolder.setValue(extractKeyTableNodeDirectives( + keyTable, keyTableActionColumn, usePrev)); + return true; + }); + return rootNodeInfoHolder.getValue(); + } } else { return extractKeyTableNodeDirectives(keyTable, keyTableActionColumn, false); } @@ -1143,7 +1147,9 @@ private long snapshotData( @NotNull final RowSequence rows, @NotNull final WritableChunk[] destinations) { synchronized (snapshotState) { - try (final SafeCloseable ignored = snapshotState.initializeSnapshot(columns, rows, destinations)) { + try (final SafeCloseable ignored1 = snapshotState.initializeSnapshot(columns, rows, destinations); + final SafeCloseable ignored2 = ExecutionContext.getContext().withUpdateGraph( + source.getUpdateGraph()).open()) { if (source.isRefreshing()) { // NB: This snapshot control must be notification-aware, because if our sources tick we cannot // guarantee that we won't observe some newly-created components on their instantiation step. 
diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeSourceRowLookup.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeSourceRowLookup.java index cf806958b06..45811a21201 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeSourceRowLookup.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeSourceRowLookup.java @@ -1,10 +1,12 @@ package io.deephaven.engine.table.impl.hierarchical; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessArtifact; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.NotificationStepSource; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.by.AggregationRowLookup; +import io.deephaven.engine.updategraph.UpdateGraph; import org.jetbrains.annotations.NotNull; import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY; @@ -16,12 +18,14 @@ */ final class TreeSourceRowLookup extends LivenessArtifact implements NotificationStepSource { + private final UpdateGraph updateGraph; private final Object source; private final NotificationStepSource parent; private final AggregationRowLookup rowLookup; private final ColumnSource sourceRowKeyColumnSource; TreeSourceRowLookup(@NotNull final Object source, @NotNull final QueryTable sourceRowLookupTable) { + this.updateGraph = ExecutionContext.getContext().getUpdateGraph(); this.source = source; if (sourceRowLookupTable.isRefreshing()) { parent = sourceRowLookupTable; @@ -38,6 +42,11 @@ boolean sameSource(@NotNull final Object source) { return this.source == source; } + @Override + public UpdateGraph getUpdateGraph() { + return updateGraph; + } + /** * Gets the row key value where {@code nodeKey} exists in the table, or the {@link #noEntryValue()} if * {@code nodeKey} is not found in the table. 
diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeTableFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeTableFilter.java index a4aa3a8387b..c4b58b165b8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeTableFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/hierarchical/TreeTableFilter.java @@ -12,6 +12,7 @@ import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.*; @@ -156,14 +157,17 @@ private TreeTableFilter(@NotNull final TreeTableImpl tree, @NotNull final WhereF parentIdSource = source.getColumnSource(tree.getParentIdentifierColumn().name()); if (source.isRefreshing()) { - final SwapListenerEx swapListener = new SwapListenerEx(source, sourceRowLookup); - source.addUpdateListener(swapListener); - ConstructSnapshot.callDataSnapshotFunction(System.identityHashCode(source) + ": ", - swapListener.makeSnapshotControl(), - (usePrev, beforeClockValue) -> { - doInitialFilter(swapListener, usePrev); - return true; - }); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph( + source.getUpdateGraph()).open()) { + final SwapListenerEx swapListener = new SwapListenerEx(source, sourceRowLookup); + source.addUpdateListener(swapListener); + ConstructSnapshot.callDataSnapshotFunction(System.identityHashCode(source) + ": ", + swapListener.makeSnapshotControl(), + (usePrev, beforeClockValue) -> { + doInitialFilter(swapListener, usePrev); + return true; + }); + } } else { doInitialFilter(null, false); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/indexer/RowSetIndexer.java 
b/engine/table/src/main/java/io/deephaven/engine/table/impl/indexer/RowSetIndexer.java index a044ad0f4e5..f280e2c86f8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/indexer/RowSetIndexer.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/indexer/RowSetIndexer.java @@ -4,13 +4,13 @@ package io.deephaven.engine.table.impl.indexer; import io.deephaven.base.verify.Require; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.TupleSource; import io.deephaven.engine.table.impl.TupleSourceFactory; import io.deephaven.tuple.EmptyTuple; -import io.deephaven.engine.updategraph.LogicalClock; import org.jetbrains.annotations.NotNull; import java.util.*; @@ -110,7 +110,8 @@ public Map getGrouping(final TupleSource tupleSource) { if (ephemeralMappings == null) { ephemeralMappings = new WeakHashMap<>(); } - ephemeralMappings.put(sourcesKey, new MappingInfo(tupleSource, result, LogicalClock.DEFAULT.currentStep())); + ephemeralMappings.put(sourcesKey, new MappingInfo(tupleSource, result, + ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); } return result; @@ -150,7 +151,8 @@ public Map getPrevGrouping(final TupleSource tupleSource) { ephemeralPrevMappings = new WeakHashMap<>(); } ephemeralPrevMappings.put(sourcesKey, - new MappingInfo(tupleSource, result, LogicalClock.DEFAULT.currentStep())); + new MappingInfo(tupleSource, result, + ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); } return result; } @@ -626,7 +628,7 @@ private static Map lookupEphemeralMapping(List col return null; } - if (resultInfo.creationTick != LogicalClock.DEFAULT.currentStep()) { + if (resultInfo.creationTick != ExecutionContext.getContext().getUpdateGraph().clock().currentStep()) { groupingMap.remove(columnSourceKey); return null; } diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableCreatorImpl.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableCreatorImpl.java index a1654ca3cc2..06a76edd440 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableCreatorImpl.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableCreatorImpl.java @@ -5,6 +5,8 @@ import com.google.auto.service.AutoService; import io.deephaven.api.ColumnName; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.TrackingRowSet; import io.deephaven.engine.table.*; @@ -13,6 +15,7 @@ import io.deephaven.engine.table.impl.remote.ConstructSnapshot; import io.deephaven.engine.table.impl.sources.InMemoryColumnSource; import io.deephaven.qst.type.Type; +import io.deephaven.util.SafeCloseable; import org.apache.commons.lang3.mutable.MutableObject; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -87,6 +90,13 @@ public PartitionedTable of( @Override public PartitionedTable of(@NotNull final Table table) { + final UpdateGraph updateGraph = table.getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return internalOf(table); + } + } + + private PartitionedTable internalOf(@NotNull final Table table) { final Map>> splitColumns = table.getDefinition().getColumnStream().collect( Collectors.partitioningBy(cd -> Table.class.isAssignableFrom(cd.getDataType()))); final List> tableColumns = splitColumns.get(true); @@ -169,25 +179,32 @@ private PartitionedTableImpl constituentsToPartitionedTable( final TrackingRowSet rowSet = RowSetFactory.flat(constituentsToUse.length).toTracking(); final Map> columnSources = Map.of(CONSTITUENT.name(), 
InMemoryColumnSource.getImmutableMemoryColumnSource(constituentsToUse)); - final Table table = new QueryTable( - CONSTRUCTED_PARTITIONED_TABLE_DEFINITION, - rowSet, - columnSources) { - { - setFlat(); + + final Table table; + // validate that the update graph is consistent + final UpdateGraph updateGraph = constituents[0].getUpdateGraph(constituents); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + table = new QueryTable( + CONSTRUCTED_PARTITIONED_TABLE_DEFINITION, + rowSet, + columnSources) { + { + setFlat(); + } + }; + + for (final Table constituent : constituentsToUse) { + table.addParentReference(constituent); } - }; - for (final Table constituent : constituentsToUse) { - table.addParentReference(constituent); - } - return new PartitionedTableImpl( - table, - Collections.emptyList(), - false, - CONSTITUENT.name(), - constituentDefinitionToUse, - false, - true); + return new PartitionedTableImpl( + table, + Collections.emptyList(), + false, + CONSTITUENT.name(), + constituentDefinitionToUse, + false, + true); + } } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableImpl.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableImpl.java index 3bef0b77e70..ff4edfc1500 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableImpl.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableImpl.java @@ -29,7 +29,7 @@ import io.deephaven.engine.table.impl.sources.NullValueColumnSource; import io.deephaven.engine.table.impl.sources.UnionSourceManager; import io.deephaven.engine.table.iterators.ChunkedObjectColumnIterator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.util.SafeCloseable; import org.apache.commons.lang3.mutable.MutableInt; import 
org.apache.commons.lang3.mutable.MutableObject; @@ -154,10 +154,14 @@ public Table merge() { return merged; } if (table.isRefreshing()) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + table.getUpdateGraph().checkInitiateSerialTableOperation(); + } + + try (final SafeCloseable ignored = + ExecutionContext.getContext().withUpdateGraph(table.getUpdateGraph()).open()) { + final UnionSourceManager unionSourceManager = new UnionSourceManager(this); + merged = unionSourceManager.getResult(); } - final UnionSourceManager unionSourceManager = new UnionSourceManager(this); - merged = unionSourceManager.getResult(); merged.setAttribute(Table.MERGED_TABLE_ATTRIBUTE, Boolean.TRUE); if (!constituentChangesPermitted) { @@ -309,8 +313,9 @@ public PartitionedTableImpl partitionedTransform( @NotNull final BinaryOperator
transformer, final boolean expectRefreshingResults) { // Check safety before doing any extra work + final UpdateGraph updateGraph = table.getUpdateGraph(other.table()); if (table.isRefreshing() || other.table().isRefreshing()) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + updateGraph.checkInitiateSerialTableOperation(); } // Validate join compatibility @@ -428,14 +433,18 @@ public Table[] constituents() { private Table[] snapshotConstituents() { if (constituentChangesPermitted) { final MutableObject resultHolder = new MutableObject<>(); - ConstructSnapshot.callDataSnapshotFunction( - "PartitionedTable.constituents(): ", - ConstructSnapshot.makeSnapshotControl(false, true, (QueryTable) table.coalesce()), - (final boolean usePrev, final long beforeClockValue) -> { - resultHolder.setValue(fetchConstituents(usePrev)); - return true; - }); - return resultHolder.getValue(); + + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph( + table.getUpdateGraph()).open()) { + ConstructSnapshot.callDataSnapshotFunction( + "PartitionedTable.constituents(): ", + ConstructSnapshot.makeSnapshotControl(false, true, (QueryTable) table.coalesce()), + (final boolean usePrev, final long beforeClockValue) -> { + resultHolder.setValue(fetchConstituents(usePrev)); + return true; + }); + return resultHolder.getValue(); + } } else { return fetchConstituents(false); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableProxyImpl.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableProxyImpl.java index 042fe2e6b3c..224ffe086d5 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableProxyImpl.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/partitioned/PartitionedTableProxyImpl.java @@ -24,8 +24,9 @@ import io.deephaven.engine.table.impl.select.SourceColumn; import 
io.deephaven.engine.table.impl.select.WhereFilter; import io.deephaven.engine.table.impl.select.analyzers.SelectAndViewAnalyzer; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.TableTools; +import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -135,6 +136,7 @@ private static ExecutionContext getOrCreateExecutionContext(final boolean requir if (context == null) { final ExecutionContext.Builder builder = ExecutionContext.newBuilder() .captureQueryCompiler() + .captureUpdateGraph() .markSystemic(); if (requiresFullContext) { builder.newQueryLibrary(); @@ -176,43 +178,48 @@ private PartitionedTable.Proxy complexTransform( if (other instanceof Table) { final Table otherTable = (Table) other; final boolean refreshingResults = target.table().isRefreshing() || otherTable.isRefreshing(); + final UpdateGraph updateGraph = target.table().getUpdateGraph(otherTable); if (refreshingResults && joinMatches != null) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + updateGraph.checkInitiateSerialTableOperation(); + } + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + return new PartitionedTableProxyImpl( + target.transform(context, ct -> transformer.apply(ct, otherTable), refreshingResults), + requireMatchingKeys, + sanityCheckJoins); } - return new PartitionedTableProxyImpl( - target.transform(context, ct -> transformer.apply(ct, otherTable), refreshingResults), - requireMatchingKeys, - sanityCheckJoins); } if (other instanceof PartitionedTable.Proxy) { final PartitionedTable.Proxy otherProxy = (PartitionedTable.Proxy) other; final PartitionedTable otherTarget = otherProxy.target(); final boolean refreshingResults = target.table().isRefreshing() || otherTarget.table().isRefreshing(); - - if (target.table().isRefreshing() || 
otherTarget.table().isRefreshing()) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + final UpdateGraph updateGraph = target.table().getUpdateGraph(otherTarget.table()); + if (refreshingResults) { + updateGraph.checkInitiateSerialTableOperation(); + } + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + + final MatchPair[] keyColumnNamePairs = PartitionedTableImpl.matchKeyColumns(target, otherTarget); + final DependentValidation uniqueKeys = requireMatchingKeys + ? matchingKeysValidation(target, otherTarget, keyColumnNamePairs) + : null; + final DependentValidation overlappingLhsJoinKeys = sanityCheckJoins && joinMatches != null + ? overlappingLhsJoinKeysValidation(target, joinMatches) + : null; + final DependentValidation overlappingRhsJoinKeys = sanityCheckJoins && joinMatches != null + ? overlappingRhsJoinKeysValidation(otherTarget, joinMatches) + : null; + + final Table validatedLhsTable = validated(target.table(), uniqueKeys, overlappingLhsJoinKeys); + final Table validatedRhsTable = validated(otherTarget.table(), uniqueKeys, overlappingRhsJoinKeys); + final PartitionedTable lhsToUse = maybeRewrap(validatedLhsTable, target); + final PartitionedTable rhsToUse = maybeRewrap(validatedRhsTable, otherTarget); + + return new PartitionedTableProxyImpl( + lhsToUse.partitionedTransform(rhsToUse, context, transformer, refreshingResults), + requireMatchingKeys, + sanityCheckJoins); } - - final MatchPair[] keyColumnNamePairs = PartitionedTableImpl.matchKeyColumns(target, otherTarget); - final DependentValidation uniqueKeys = requireMatchingKeys - ? matchingKeysValidation(target, otherTarget, keyColumnNamePairs) - : null; - final DependentValidation overlappingLhsJoinKeys = sanityCheckJoins && joinMatches != null - ? overlappingLhsJoinKeysValidation(target, joinMatches) - : null; - final DependentValidation overlappingRhsJoinKeys = sanityCheckJoins && joinMatches != null - ? 
overlappingRhsJoinKeysValidation(otherTarget, joinMatches) - : null; - - final Table validatedLhsTable = validated(target.table(), uniqueKeys, overlappingLhsJoinKeys); - final Table validatedRhsTable = validated(otherTarget.table(), uniqueKeys, overlappingRhsJoinKeys); - final PartitionedTable lhsToUse = maybeRewrap(validatedLhsTable, target); - final PartitionedTable rhsToUse = maybeRewrap(validatedRhsTable, otherTarget); - - return new PartitionedTableProxyImpl( - lhsToUse.partitionedTransform(rhsToUse, context, transformer, refreshingResults), - requireMatchingKeys, - sanityCheckJoins); } throw new IllegalArgumentException("Unexpected TableOperations input " + other + ", expected Table or PartitionedTable.Proxy"); @@ -249,8 +256,8 @@ private static Table validated( return parent; } - // NB: All code paths that pass non-null validations for refreshing parents call checkInitiateTableOperation - // first, so we can dispense with snapshots and swap listeners. + // NB: All code paths that pass non-null validations for refreshing parents call + // checkInitiateSerialTableOperation first, so we can dispense with snapshots and swap listeners. 
final QueryTable coalescedParent = (QueryTable) parent.coalesce(); final QueryTable child = coalescedParent.getSubTable( coalescedParent.getRowSet(), diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/perf/UpdatePerformanceTracker.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/perf/UpdatePerformanceTracker.java index 0c52aad3d14..d39c8ac66db 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/perf/UpdatePerformanceTracker.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/perf/UpdatePerformanceTracker.java @@ -8,7 +8,7 @@ import io.deephaven.engine.tablelogger.EngineTableLoggers; import io.deephaven.engine.tablelogger.UpdatePerformanceLogLogger; import io.deephaven.engine.tablelogger.impl.memory.MemoryTableLogger; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.table.impl.*; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; @@ -25,7 +25,7 @@ /** *

- * This tool is meant to track periodic update events that take place in an {@link UpdateGraphProcessor}. This generally + * This tool is meant to track periodic update events that take place in an {@link PeriodicUpdateGraph}. This generally * includes: *

    *
  1. Update source {@code run()} invocations
  2. @@ -34,7 +34,7 @@ *
* (1) * - * @apiNote Regarding thread safety, this class interacts with a singleton UpdateGraphProcessor and expects all calls to + * @apiNote Regarding thread safety, this class interacts with a singleton PeriodicUpdateGraph and expects all calls to * {@link #getEntry(String)}, {@link PerformanceEntry#onUpdateStart()}, and * {@link PerformanceEntry#onUpdateEnd()} to be performed while protected by the UGP's lock. */ @@ -102,9 +102,8 @@ public void run() { // should log, but no logger handy // ignore } - UpdateGraphProcessor.DEFAULT.sharedLock().doLocked( - () -> finishInterval(intervalStartTimeMillis, - System.currentTimeMillis(), + getQueryTable().getUpdateGraph().sharedLock().doLocked( + () -> finishInterval(intervalStartTimeMillis, System.currentTimeMillis(), System.nanoTime() - intervalStartTimeNanos)); } } @@ -148,7 +147,7 @@ public final PerformanceEntry getEntry(final String description) { /** * Do entry maintenance, generate an interval performance report table for all active entries, and reset for the - * next interval. Note: This method is only called under the UpdateGraphProcessor instance's lock. This + * next interval. Note: This method is only called under the PeriodicUpdateGraph instance's lock. This * ensures exclusive access to the entries, and also prevents any other thread from removing from entries. 
* * @param intervalStartTimeMillis interval start time in millis diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/rangejoin/RangeJoinOperation.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/rangejoin/RangeJoinOperation.java index cea8fa69ce5..8cde8eb56ad 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/rangejoin/RangeJoinOperation.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/rangejoin/RangeJoinOperation.java @@ -260,7 +260,11 @@ public Result initialize(final boolean usePrev, final long beforeClo jobScheduler = ImmediateJobScheduler.INSTANCE; } - return new Result<>(staticRangeJoin(jobScheduler)); + final ExecutionContext executionContext = ExecutionContext.newBuilder() + .captureUpdateGraph() + .markSystemic().build(); + + return new Result<>(staticRangeJoin(jobScheduler, executionContext)); } @Override @@ -268,9 +272,11 @@ public MemoizedOperationKey getMemoizedOperationKey() { return memoizedOperationKey; } - private QueryTable staticRangeJoin(@NotNull final JobScheduler jobScheduler) { + private QueryTable staticRangeJoin( + @NotNull final JobScheduler jobScheduler, + @NotNull final ExecutionContext executionContext) { final CompletableFuture resultFuture = new CompletableFuture<>(); - new StaticRangeJoinPhase1(jobScheduler, resultFuture).start(); + new StaticRangeJoinPhase1(jobScheduler, executionContext, resultFuture).start(); try { return resultFuture.get(); } catch (InterruptedException e) { @@ -283,12 +289,15 @@ private QueryTable staticRangeJoin(@NotNull final JobScheduler jobScheduler) { private static class RangeJoinPhase { protected final JobScheduler jobScheduler; + protected final ExecutionContext executionContext; protected final CompletableFuture resultFuture; protected RangeJoinPhase( @NotNull final JobScheduler jobScheduler, + @NotNull final ExecutionContext executionContext, @NotNull final CompletableFuture resultFuture) { this.jobScheduler = jobScheduler; + 
this.executionContext = executionContext; this.resultFuture = resultFuture; } } @@ -297,15 +306,16 @@ private class StaticRangeJoinPhase1 extends RangeJoinPhase { private StaticRangeJoinPhase1( @NotNull final JobScheduler jobScheduler, + @NotNull final ExecutionContext executionContext, @NotNull final CompletableFuture resultFuture) { - super(jobScheduler, resultFuture); + super(jobScheduler, executionContext, resultFuture); } private void start() { // Perform the left table work via the job scheduler, possibly concurrently with the right table work. final CompletableFuture
groupLeftTableFuture = new CompletableFuture<>(); jobScheduler.submit( - ExecutionContext.getContextToRecord(), + executionContext, () -> groupLeftTableFuture.complete(groupLeftTable()), logOutput -> logOutput.append("static range join group left table"), groupLeftTableFuture::completeExceptionally); @@ -331,7 +341,9 @@ private void start() { resultFuture.completeExceptionally(e); return; } - new StaticRangeJoinPhase2(jobScheduler, resultFuture).start(leftTableGrouped, rightTableGrouped); + new StaticRangeJoinPhase2(jobScheduler, executionContext, resultFuture).start( + leftTableGrouped, + rightTableGrouped); } private Table groupLeftTable() { @@ -390,8 +402,9 @@ private class StaticRangeJoinPhase2 private StaticRangeJoinPhase2( @NotNull final JobScheduler jobScheduler, + @NotNull final ExecutionContext executionContext, @NotNull final CompletableFuture resultFuture) { - super(jobScheduler, resultFuture); + super(jobScheduler, executionContext, resultFuture); leftStartValues = ReinterpretUtils.maybeConvertToPrimitive( leftTable.getColumnSource(rangeMatch.leftStartColumn().name())); @@ -458,13 +471,13 @@ private void start(@NotNull final Table leftTableGrouped, @NotNull final Table r leftGroupRowSets = joinedInputTables.getColumnSource(LEFT_ROW_SET.name(), RowSet.class); rightGroupRowSets = joinedInputTables.getColumnSource(RIGHT_ROW_SET.name(), RowSet.class); jobScheduler.iterateParallel( - ExecutionContext.getContextToRecord(), + executionContext, logOutput -> logOutput.append("static range join find ranges"), TaskContext::new, 0, joinedInputTables.intSize(), this, - () -> new StaticRangeJoinPhase3(jobScheduler, resultFuture).start( + () -> new StaticRangeJoinPhase3(jobScheduler, executionContext, resultFuture).start( rightGroupRowSets, outputSlotsExposed, outputStartPositionsInclusiveExposed, @@ -769,8 +782,9 @@ private class StaticRangeJoinPhase3 extends RangeJoinPhase { private StaticRangeJoinPhase3( @NotNull final JobScheduler jobScheduler, + @NotNull final 
ExecutionContext executionContext, @NotNull final CompletableFuture resultFuture) { - super(jobScheduler, resultFuture); + super(jobScheduler, executionContext, resultFuture); } public void start( diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java index bc0170e566f..d1e67b404b8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/remote/ConstructSnapshot.java @@ -7,14 +7,16 @@ import io.deephaven.base.log.LogOutput; import io.deephaven.base.log.LogOutputAppendable; import io.deephaven.base.verify.Assert; -import io.deephaven.chunk.util.pools.ChunkPoolConstants; import io.deephaven.configuration.Configuration; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.SharedContext; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.sources.ReinterpretUtils; import io.deephaven.engine.table.impl.util.*; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.io.log.LogEntry; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.exceptions.CancellationException; @@ -22,7 +24,6 @@ import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.engine.updategraph.WaitNotification; -import io.deephaven.proto.backplane.grpc.Config; import io.deephaven.util.datastructures.LongSizedDataStructure; import io.deephaven.engine.liveness.LivenessManager; import io.deephaven.engine.liveness.LivenessScope; @@ -30,7 +31,6 @@ import 
io.deephaven.engine.table.impl.BaseTable; import io.deephaven.engine.table.impl.NotificationStepSource; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.chunk.*; import io.deephaven.util.SafeCloseable; import io.deephaven.UncheckedDeephavenException; @@ -65,14 +65,14 @@ public NoSnapshotAllowedException(String reason) { private static final io.deephaven.io.logger.Logger log = LoggerFactory.getLogger(ConstructSnapshot.class); /** - * The maximum number of allowed attempts to construct a snapshot concurrently with {@link UpdateGraphProcessor} run + * The maximum number of allowed attempts to construct a snapshot concurrently with {@link PeriodicUpdateGraph} run * processing. After this many attempts, we fall back and wait until we can block refreshes. */ private static final int MAX_CONCURRENT_ATTEMPTS = Configuration.getInstance().getIntegerWithDefault("ConstructSnapshot.maxConcurrentAttempts", 2); /** - * The maximum duration of an attempt to construct a snapshot concurrently with {@link UpdateGraphProcessor} run + * The maximum duration of an attempt to construct a snapshot concurrently with {@link PeriodicUpdateGraph} run * processing. If an unsuccessful attempt takes longer than this timeout, we will fall back and wait until we can * block refreshes. 
*/ @@ -233,7 +233,7 @@ private boolean concurrentAttemptInconsistent() { } if (!clockConsistent( activeConcurrentAttempt.beforeClockValue, - lastObservedClockValue = LogicalClock.DEFAULT.currentValue(), + lastObservedClockValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(), activeConcurrentAttempt.usingPreviousValues)) { return true; } @@ -279,7 +279,7 @@ private void maybeWaitForSatisfaction(@Nullable final NotificationQueue.Dependen || WaitNotification.waitForSatisfaction(beforeStep, dependency)) { return; } - lastObservedClockValue = LogicalClock.DEFAULT.currentValue(); + lastObservedClockValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(); // Blow up if we've detected a step change if (LogicalClock.getStep(lastObservedClockValue) != beforeStep) { throw new SnapshotInconsistentException(); @@ -319,8 +319,9 @@ private LogOutput appendConcurrentAttemptClockInfo(@NotNull final LogOutput logO * @return Whether this thread currently holds a lock on the UGP */ private boolean locked() { - return UpdateGraphProcessor.DEFAULT.sharedLock().isHeldByCurrentThread() - || UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); + if (ExecutionContext.getContext().getUpdateGraph().sharedLock().isHeldByCurrentThread()) + return true; + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().isHeldByCurrentThread(); } /** @@ -330,7 +331,7 @@ private void maybeAcquireLock() { if (locked()) { return; } - UpdateGraphProcessor.DEFAULT.sharedLock().lock(); + ExecutionContext.getContext().getUpdateGraph().sharedLock().lock(); acquiredLock = true; } @@ -339,7 +340,7 @@ private void maybeAcquireLock() { */ private void maybeReleaseLock() { if (acquiredLock && concurrentSnapshotDepth == 0 && lockedSnapshotDepth == 0) { - UpdateGraphProcessor.DEFAULT.sharedLock().unlock(); + ExecutionContext.getContext().getUpdateGraph().sharedLock().unlock(); acquiredLock = false; } } @@ -434,7 +435,7 @@ public static 
LogOutput appendConcurrentAttemptClockInfo(@NotNull final LogOutpu * @return a snapshot of the entire base table. */ public static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, - @NotNull final BaseTable table) { + @NotNull final BaseTable table) { return constructInitialSnapshot(logIdentityObject, table, null, null); } @@ -450,7 +451,7 @@ public static InitialSnapshot constructInitialSnapshot(final Object logIdentityO * @return a snapshot of the entire base table. */ public static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, - @NotNull final BaseTable table, + @NotNull final BaseTable table, @Nullable final BitSet columnsToSerialize, @Nullable final RowSet keysToSnapshot) { return constructInitialSnapshot(logIdentityObject, table, columnsToSerialize, keysToSnapshot, @@ -458,18 +459,21 @@ public static InitialSnapshot constructInitialSnapshot(final Object logIdentityO } static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, - @NotNull final BaseTable table, + @NotNull final BaseTable table, @Nullable final BitSet columnsToSerialize, @Nullable final RowSet keysToSnapshot, @NotNull final SnapshotControl control) { - final InitialSnapshot snapshot = new InitialSnapshot(); + final UpdateGraph updateGraph = table.getUpdateGraph(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final InitialSnapshot snapshot = new InitialSnapshot(); - final SnapshotFunction doSnapshot = (usePrev, beforeClockValue) -> serializeAllTable(usePrev, snapshot, table, - logIdentityObject, columnsToSerialize, keysToSnapshot); + final SnapshotFunction doSnapshot = (usePrev, beforeClockValue) -> serializeAllTable( + usePrev, snapshot, table, logIdentityObject, columnsToSerialize, keysToSnapshot); - snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); + snapshot.step = 
callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); - return snapshot; + return snapshot; + } } /** @@ -484,15 +488,18 @@ static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, * @return a snapshot of the entire base table. */ public static InitialSnapshot constructInitialSnapshotInPositionSpace(final Object logIdentityObject, - @NotNull final BaseTable table, + @NotNull final BaseTable table, @Nullable final BitSet columnsToSerialize, @Nullable final RowSet positionsToSnapshot) { - return constructInitialSnapshotInPositionSpace(logIdentityObject, table, columnsToSerialize, - positionsToSnapshot, makeSnapshotControl(false, table.isRefreshing(), table)); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph( + table.getUpdateGraph()).open()) { + return constructInitialSnapshotInPositionSpace(logIdentityObject, table, columnsToSerialize, + positionsToSnapshot, makeSnapshotControl(false, table.isRefreshing(), table)); + } } static InitialSnapshot constructInitialSnapshotInPositionSpace(final Object logIdentityObject, - @NotNull final BaseTable table, + @NotNull final BaseTable table, @Nullable final BitSet columnsToSerialize, @Nullable final RowSet positionsToSnapshot, @NotNull final SnapshotControl control) { @@ -512,7 +519,10 @@ static InitialSnapshot constructInitialSnapshotInPositionSpace(final Object logI return serializeAllTable(usePrev, snapshot, table, logIdentityObject, columnsToSerialize, keysToSnapshot); }; - snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph( + table.getUpdateGraph()).open()) { + snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); + } return snapshot; } @@ -524,8 +534,7 @@ static InitialSnapshot constructInitialSnapshotInPositionSpace(final Object logI * 
@param table the table to snapshot. * @return a snapshot of the entire base table. */ - public static BarrageMessage constructBackplaneSnapshot(final Object logIdentityObject, - final BaseTable table) { + public static BarrageMessage constructBackplaneSnapshot(final Object logIdentityObject, final BaseTable table) { return constructBackplaneSnapshotInPositionSpace(logIdentityObject, table, null, null, null); } @@ -541,7 +550,7 @@ public static BarrageMessage constructBackplaneSnapshot(final Object logIdentity * @return a snapshot of the entire base table. */ public static BarrageMessage constructBackplaneSnapshotInPositionSpace(final Object logIdentityObject, - final BaseTable table, + final BaseTable table, @Nullable final BitSet columnsToSerialize, @Nullable final RowSequence positionsToSnapshot, @Nullable final RowSequence reversePositionsToSnapshot) { @@ -563,48 +572,52 @@ public static BarrageMessage constructBackplaneSnapshotInPositionSpace(final Obj * @return a snapshot of the entire base table. 
*/ public static BarrageMessage constructBackplaneSnapshotInPositionSpace(final Object logIdentityObject, - @NotNull final BaseTable table, + @NotNull final BaseTable table, @Nullable final BitSet columnsToSerialize, @Nullable final RowSequence positionsToSnapshot, @Nullable final RowSequence reversePositionsToSnapshot, @NotNull final SnapshotControl control) { - final BarrageMessage snapshot = new BarrageMessage(); - snapshot.isSnapshot = true; - snapshot.shifted = RowSetShiftData.EMPTY; + final UpdateGraph updateGraph = table.getUpdateGraph(); + try (final SafeCloseable ignored1 = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final BarrageMessage snapshot = new BarrageMessage(); + snapshot.isSnapshot = true; + snapshot.shifted = RowSetShiftData.EMPTY; - final SnapshotFunction doSnapshot = (usePrev, beforeClockValue) -> { - final RowSet keysToSnapshot; - if (positionsToSnapshot == null && reversePositionsToSnapshot == null) { - keysToSnapshot = null; - } else { - final RowSet rowSetToUse = usePrev ? table.getRowSet().copyPrev() : table.getRowSet(); - try (final SafeCloseable ignored = usePrev ? rowSetToUse : null) { - final WritableRowSet forwardKeys = - positionsToSnapshot == null ? null : rowSetToUse.subSetForPositions(positionsToSnapshot); - final RowSet reverseKeys = reversePositionsToSnapshot == null ? null - : rowSetToUse.subSetForReversePositions(reversePositionsToSnapshot); - if (forwardKeys != null) { - if (reverseKeys != null) { - forwardKeys.insert(reverseKeys); - reverseKeys.close(); + final SnapshotFunction doSnapshot = (usePrev, beforeClockValue) -> { + final RowSet keysToSnapshot; + if (positionsToSnapshot == null && reversePositionsToSnapshot == null) { + keysToSnapshot = null; + } else { + final RowSet rowSetToUse = usePrev ? table.getRowSet().copyPrev() : table.getRowSet(); + try (final SafeCloseable ignored = usePrev ? rowSetToUse : null) { + final WritableRowSet forwardKeys = + positionsToSnapshot == null ? 
null + : rowSetToUse.subSetForPositions(positionsToSnapshot); + final RowSet reverseKeys = reversePositionsToSnapshot == null ? null + : rowSetToUse.subSetForReversePositions(reversePositionsToSnapshot); + if (forwardKeys != null) { + if (reverseKeys != null) { + forwardKeys.insert(reverseKeys); + reverseKeys.close(); + } + keysToSnapshot = forwardKeys; + } else { + keysToSnapshot = reverseKeys; } - keysToSnapshot = forwardKeys; - } else { - keysToSnapshot = reverseKeys; } } - } - try (final RowSet ignored = keysToSnapshot) { - return serializeAllTable(usePrev, snapshot, table, logIdentityObject, columnsToSerialize, - keysToSnapshot); - } - }; + try (final RowSet ignored = keysToSnapshot) { + return serializeAllTable(usePrev, snapshot, table, logIdentityObject, columnsToSerialize, + keysToSnapshot); + } + }; - snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); - snapshot.firstSeq = snapshot.lastSeq = snapshot.step; + snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); + snapshot.firstSeq = snapshot.lastSeq = snapshot.step; - return snapshot; + return snapshot; + } } /** @@ -616,18 +629,24 @@ public static BarrageMessage constructBackplaneSnapshotInPositionSpace(final Obj * @return list of the resulting {@link InitialSnapshot}s */ public static List constructInitialSnapshots(final Object logIdentityObject, - final BaseTable... tables) { - final List snapshots = new ArrayList<>(); + final BaseTable... 
tables) { + if (tables.length == 0) { + return Collections.emptyList(); + } + final UpdateGraph updateGraph = NotificationQueue.Dependency.getUpdateGraph(null, tables); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(updateGraph).open()) { + final List snapshots = new ArrayList<>(); - final NotificationObliviousMultipleSourceSnapshotControl snapshotControl = - new NotificationObliviousMultipleSourceSnapshotControl(tables); + final NotificationObliviousMultipleSourceSnapshotControl snapshotControl = + new NotificationObliviousMultipleSourceSnapshotControl(tables); - final SnapshotFunction doSnapshot = - (usePrev, beforeClockValue) -> serializeAllTables(usePrev, snapshots, tables, logIdentityObject); + final SnapshotFunction doSnapshot = + (usePrev, beforeClockValue) -> serializeAllTables(usePrev, snapshots, tables, logIdentityObject); - callDataSnapshotFunction(System.identityHashCode(logIdentityObject), snapshotControl, doSnapshot); + callDataSnapshotFunction(System.identityHashCode(logIdentityObject), snapshotControl, doSnapshot); - return snapshots; + return snapshots; + } } @FunctionalInterface @@ -949,10 +968,11 @@ public Boolean usePreviousValues(final long beforeClockValue) { .filter((final NotificationQueue.Dependency dep) -> !dep.satisfied(beforeStep)) .toArray(NotificationStepSource[]::new); if (notYetSatisfied.length > 0 - && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied) - && LogicalClock.DEFAULT.currentStep() != beforeStep) { - // If we missed a step change, we've already failed, request a do-over. - return null; + && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied)) { + if (ExecutionContext.getContext().getUpdateGraph().clock().currentStep() != beforeStep) { + // If we missed a step change, we've already failed, request a do-over. 
+ return null; + } } } return false; @@ -1003,10 +1023,11 @@ public Boolean usePreviousValues(final long beforeClockValue) { .filter((final NotificationQueue.Dependency dep) -> !dep.satisfied(beforeStep)) .toArray(NotificationStepSource[]::new); if (notYetSatisfied.length > 0 - && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied) - && LogicalClock.DEFAULT.currentStep() != beforeStep) { - // If we missed a step change, we've already failed, request a do-over. - return null; + && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied)) { + if (ExecutionContext.getContext().getUpdateGraph().clock().currentStep() != beforeStep) { + // If we missed a step change, we've already failed, request a do-over. + return null; + } } } return false; @@ -1066,7 +1087,7 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l final LivenessManager initialLivenessManager = LivenessScopeStack.peek(); while (numConcurrentAttempts < MAX_CONCURRENT_ATTEMPTS && !state.locked()) { ++numConcurrentAttempts; - final long beforeClockValue = LogicalClock.DEFAULT.currentValue(); + final long beforeClockValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(); final long attemptStart = System.currentTimeMillis(); final Boolean previousValuesRequested = control.usePreviousValues(beforeClockValue); @@ -1077,9 +1098,11 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l // noinspection AutoUnboxing final boolean usePrev = previousValuesRequested; if (LogicalClock.getState(beforeClockValue) == LogicalClock.State.Idle && usePrev) { + // noinspection ThrowableNotThrown Assert.statementNeverExecuted("Previous values requested while not updating: " + beforeClockValue); } - if (UpdateGraphProcessor.DEFAULT.isRefreshThread() && usePrev) { + if (ExecutionContext.getContext().getUpdateGraph().currentThreadProcessesUpdates() && usePrev) { + // noinspection ThrowableNotThrown 
Assert.statementNeverExecuted("Previous values requested from a run thread: " + beforeClockValue); } @@ -1099,7 +1122,8 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l log.debug().append(logPrefix).append(" Disallowed UGP-less Snapshot Function took ") .append(System.currentTimeMillis() - attemptStart).append("ms") .append(", beforeClockValue=").append(beforeClockValue) - .append(", afterClockValue=").append(LogicalClock.DEFAULT.currentValue()) + .append(", afterClockValue=") + .append(ExecutionContext.getContext().getUpdateGraph().clock().currentValue()) .append(", usePrev=").append(usePrev) .endl(); } @@ -1111,7 +1135,7 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l state.endConcurrentSnapshot(startObject); } - final long afterClockValue = LogicalClock.DEFAULT.currentValue(); + final long afterClockValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(); try { snapshotSuccessful = clockConsistent(beforeClockValue, afterClockValue, usePrev) && control.snapshotCompletedConsistently(afterClockValue, usePrev); @@ -1185,7 +1209,7 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l } state.startLockedSnapshot(); try { - final long beforeClockValue = LogicalClock.DEFAULT.currentValue(); + final long beforeClockValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(); final Boolean previousValuesRequested = control.usePreviousValues(beforeClockValue); if (!Boolean.FALSE.equals(previousValuesRequested)) { @@ -1198,7 +1222,7 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l functionSuccessful = function.call(false, beforeClockValue); Assert.assertion(functionSuccessful, "functionSuccessful"); - final long afterClockValue = LogicalClock.DEFAULT.currentValue(); + final long afterClockValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(); Assert.eq(beforeClockValue, 
"beforeClockValue", afterClockValue, "afterClockValue"); @@ -1244,10 +1268,11 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l */ public static boolean serializeAllTable(boolean usePrev, InitialSnapshot snapshot, - BaseTable table, + BaseTable table, Object logIdentityObject, BitSet columnsToSerialize, RowSet keysToSnapshot) { + // noinspection resource snapshot.rowSet = (usePrev ? table.getRowSet().copyPrev() : table.getRowSet()).copy(); if (keysToSnapshot != null) { @@ -1258,7 +1283,7 @@ public static boolean serializeAllTable(boolean usePrev, LongSizedDataStructure.intSize("construct snapshot", snapshot.rowsIncluded.size()); - final Map sourceMap = table.getColumnSourceMap(); + final Map> sourceMap = table.getColumnSourceMap(); final String[] columnSources = sourceMap.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); snapshot.dataColumns = new Object[columnSources.length]; @@ -1315,7 +1340,7 @@ public static boolean serializeAllTable(boolean usePrev, */ public static boolean serializeAllTable(final boolean usePrev, final BarrageMessage snapshot, - final BaseTable table, + final BaseTable table, final Object logIdentityObject, final BitSet columnsToSerialize, final RowSet keysToSnapshot) { @@ -1333,7 +1358,7 @@ public static boolean serializeAllTable(final boolean usePrev, snapshot.rowsIncluded = snapshot.rowsAdded.copy(); } - final Map sourceMap = table.getColumnSourceMap(); + final Map> sourceMap = table.getColumnSourceMap(); final String[] columnSources = sourceMap.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); try (final SharedContext sharedContext = @@ -1390,11 +1415,11 @@ public static boolean serializeAllTable(final boolean usePrev, return true; } - private static boolean serializeAllTables(boolean usePrev, List snapshots, BaseTable[] tables, + private static boolean serializeAllTables(boolean usePrev, List snapshots, BaseTable[] tables, Object logIdentityObject) { snapshots.clear(); - for (final 
BaseTable table : tables) { + for (final BaseTable table : tables) { final InitialSnapshot snapshot = new InitialSnapshot(); snapshots.add(snapshot); if (!serializeAllTable(usePrev, snapshot, table, logIdentityObject, null, null)) { @@ -1434,24 +1459,6 @@ private static Object getSnapshotData(final ColumnSource columnSource, fi } } - private static WritableChunk getSnapshotDataAsChunk(final ColumnSource columnSource, - final SharedContext sharedContext, final RowSet rowSet, final boolean usePrev) { - final ColumnSource sourceToUse = ReinterpretUtils.maybeConvertToPrimitive(columnSource); - final int size = rowSet.intSize(); - try (final ColumnSource.FillContext context = sharedContext != null - ? sourceToUse.makeFillContext(size, sharedContext) - : sourceToUse.makeFillContext(size)) { - final ChunkType chunkType = sourceToUse.getChunkType(); - final WritableChunk result = chunkType.makeWritableChunk(size); - if (usePrev) { - sourceToUse.fillPrevChunk(context, result, rowSet); - } else { - sourceToUse.fillChunk(context, result, rowSet); - } - return result; - } - } - private static ArrayList> getSnapshotDataAsChunkList(final ColumnSource columnSource, final SharedContext sharedContext, final RowSet rowSet, final boolean usePrev) { long offset = 0; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java index a0987b0f3e0..257e1518636 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/replay/Replayer.java @@ -5,9 +5,10 @@ import io.deephaven.base.clock.Clock; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.exceptions.CancellationException; import io.deephaven.engine.table.Table; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.updategraph.TerminalNotification; @@ -36,8 +37,10 @@ public class Replayer implements ReplayerInterface, Runnable { private boolean lastLap; private final ReplayerHandle handle = () -> Replayer.this; - // Condition variable for use with UpdateGraphProcessor lock - the object monitor is no longer used - private final Condition ugpCondition = UpdateGraphProcessor.DEFAULT.exclusiveLock().newCondition(); + private final UpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph(); + + // Condition variable for use with PeriodicUpdateGraph lock - the object monitor is no longer used + private final Condition ugpCondition = updateGraph.exclusiveLock().newCondition(); /** * Creates a new replayer. @@ -58,7 +61,7 @@ public Replayer(Instant startTime, Instant endTime) { public void start() { deltaNanos = DateTimeUtils.millisToNanos(System.currentTimeMillis()) - DateTimeUtils.epochNanos(startTime); for (Runnable currentTable : currentTables) { - UpdateGraphProcessor.DEFAULT.addSource(currentTable); + updateGraph.addSource(currentTable); } } @@ -84,14 +87,14 @@ public void shutdown() throws IOException { if (done) { return; } - UpdateGraphProcessor.DEFAULT.removeSources(currentTables); + updateGraph.removeSources(currentTables); currentTables = null; - if (UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { + if (updateGraph.exclusiveLock().isHeldByCurrentThread()) { shutdownInternal(); - } else if (UpdateGraphProcessor.DEFAULT.isRefreshThread()) { - UpdateGraphProcessor.DEFAULT.addNotification(new TerminalNotification() { + } else if (updateGraph.currentThreadProcessesUpdates()) { + updateGraph.addNotification(new TerminalNotification() { @Override - public boolean mustExecuteWithUgpLock() { + public boolean mustExecuteWithUpdateGraphLock() { return true; } @@ -101,13 +104,13 
@@ public void run() { } }); } else { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(this::shutdownInternal); + updateGraph.exclusiveLock().doLocked(this::shutdownInternal); } } private void shutdownInternal() { - Assert.assertion(UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread(), - "UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()"); + Assert.assertion(updateGraph.exclusiveLock().isHeldByCurrentThread(), + "updateGraph.exclusiveLock().isHeldByCurrentThread()"); done = true; ugpCondition.signalAll(); } @@ -125,7 +128,7 @@ public void waitDone(long maxTimeMillis) { if (done) { return; } - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(() -> { + updateGraph.exclusiveLock().doLocked(() -> { while (!done && expiryTime > System.currentTimeMillis()) { try { ugpCondition.await(expiryTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS); @@ -191,7 +194,7 @@ public Table replay(Table dataSource, String timeColumn) { new ReplayTable(dataSource.getRowSet(), dataSource.getColumnSourceMap(), timeColumn, this); currentTables.add(result); if (deltaNanos < Long.MAX_VALUE) { - UpdateGraphProcessor.DEFAULT.addSource(result); + updateGraph.addSource(result); } return result; } @@ -211,7 +214,7 @@ public Table replayGrouped(Table dataSource, String timeColumn, String groupingC dataSource.getColumnSourceMap(), timeColumn, this, groupingColumn); currentTables.add(result); if (deltaNanos < Long.MAX_VALUE) { - UpdateGraphProcessor.DEFAULT.addSource(result); + updateGraph.addSource(result); } return result; } @@ -230,7 +233,7 @@ public Table replayGroupedLastBy(Table dataSource, String timeColumn, String... 
dataSource.getColumnSourceMap(), timeColumn, this, groupingColumns); currentTables.add(result); if (deltaNanos < Long.MAX_VALUE) { - UpdateGraphProcessor.DEFAULT.addSource(result); + updateGraph.addSource(result); } return result; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java index 7291c54c5f3..541f815b866 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/AutoTuningIncrementalReleaseFilter.java @@ -3,10 +3,10 @@ */ package io.deephaven.engine.table.impl.select; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.TerminalNotification; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; -import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.util.annotations.ScriptApi; import java.text.DecimalFormat; @@ -198,7 +198,8 @@ public void run() { } } else { final long cycleDurationNanos = cycleEndNanos - lastRefreshNanos; - final long targetCycleNanos = UpdateGraphProcessor.DEFAULT.getTargetCycleDurationMillis() * 1000 * 1000; + final long targetCycleNanos = + updateGraph.cast().getTargetCycleDurationMillis() * 1000 * 1000; final double rowsPerNanoSecond = ((double) nextSize) / cycleDurationNanos; nextSize = Math.max((long) (rowsPerNanoSecond * targetCycleNanos * targetFactor), 1L); if (verbose) { @@ -216,13 +217,13 @@ public void run() { .append(decimalFormat.format(eta)).append(" sec").endl(); } } - UpdateGraphProcessor.DEFAULT.addNotification(new TerminalNotification() { + updateGraph.addNotification(new TerminalNotification() { final boolean captureReleasedAll = releasedAll; 
@Override public void run() { cycleEndNanos = System.nanoTime(); - UpdateGraphProcessor.DEFAULT.requestRefresh(); + updateGraph.requestRefresh(); if (!captureReleasedAll && releasedAll) { final DecimalFormat decimalFormat = new DecimalFormat("###,###.##"); final long durationNanos = cycleEndNanos - firstCycleNanos; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java index 9f4c8f59155..48eadacaa59 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/BaseIncrementalReleaseFilter.java @@ -7,7 +7,6 @@ import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.rowset.RowSet; import io.deephaven.util.QueryConstants; import io.deephaven.util.annotations.ScriptApi; @@ -29,13 +28,13 @@ public abstract class BaseIncrementalReleaseFilter extends WhereFilterLivenessAr private RecomputeListener listener; private boolean releaseMoreEntries = false; - transient private boolean addedToUpdateGraphProcessor = false; + transient private boolean addedToUpdateGraph = false; private transient volatile long firstReleaseNanos = QueryConstants.NULL_LONG; private transient volatile long releaseAllNanos = QueryConstants.NULL_LONG; /** - * Should we release entries during the UpdateGraphProcessor cycle? + * Should we release entries during the PeriodicUpdateGraph cycle? 
*/ private transient volatile boolean started; private transient volatile boolean initialized = false; @@ -67,13 +66,13 @@ public void init(TableDefinition tableDefinition) { if (!started) { return; } - addToUpdateGraphProcessor(); + addToUpdateGraph(); } - private void addToUpdateGraphProcessor() { - if (!addedToUpdateGraphProcessor) { - UpdateGraphProcessor.DEFAULT.addSource(this); - addedToUpdateGraphProcessor = true; + private void addToUpdateGraph() { + if (!addedToUpdateGraph) { + updateGraph.addSource(this); + addedToUpdateGraph = true; } } @@ -94,7 +93,7 @@ public WritableRowSet filter(RowSet selection, RowSet fullSet, Table table, bool if (fullSet.size() <= releasedSize) { onReleaseAll(); releasedSize = fullSet.size(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); listener = null; } @@ -117,14 +116,14 @@ void onReleaseAll() { */ @ScriptApi public void waitForCompletion() throws InterruptedException { - if (UpdateGraphProcessor.DEFAULT.isRefreshThread()) { + if (updateGraph.currentThreadProcessesUpdates()) { throw new IllegalStateException( - "Can not wait for completion while on UpdateGraphProcessor refresh thread, updates would block."); + "Can not wait for completion while on PeriodicUpdateGraph refresh thread, updates would block."); } if (releaseAllNanos != QueryConstants.NULL_LONG) { return; } - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(() -> { + updateGraph.exclusiveLock().doLocked(() -> { while (releaseAllNanos == QueryConstants.NULL_LONG) { // this only works because we will never actually filter out a row from the result; in the general // WhereFilter case, the result table may not update. 
We could await on the source table, but @@ -138,15 +137,15 @@ public void waitForCompletion() throws InterruptedException { */ @ScriptApi public void waitForCompletion(long timeoutMillis) throws InterruptedException { - if (UpdateGraphProcessor.DEFAULT.isRefreshThread()) { + if (updateGraph.currentThreadProcessesUpdates()) { throw new IllegalStateException( - "Can not wait for completion while on UpdateGraphProcessor refresh thread, updates would block."); + "Can not wait for completion while on PeriodicUpdateGraph refresh thread, updates would block."); } if (releaseAllNanos != QueryConstants.NULL_LONG) { return; } final long end = System.currentTimeMillis() + timeoutMillis; - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(() -> { + updateGraph.exclusiveLock().doLocked(() -> { while (releaseAllNanos == QueryConstants.NULL_LONG) { // this only works because we will never actually filter out a row from the result; in the general // WhereFilter case, the result table may not update. 
We could await on the source table, but @@ -178,7 +177,7 @@ public long durationNanos() { public void start() { started = true; if (initialized) { - addToUpdateGraphProcessor(); + addToUpdateGraph(); } } @@ -234,7 +233,7 @@ public void run() { @Override protected void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ClockFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ClockFilter.java index 1ae24fe23f1..293096f4993 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ClockFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/ClockFilter.java @@ -13,7 +13,6 @@ import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.table.impl.lang.QueryLanguageFunctionUtils; import io.deephaven.engine.table.impl.sources.ReinterpretUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.DynamicNode; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.ColumnSource; @@ -93,7 +92,7 @@ public final void setRecomputeListener(@NotNull final RecomputeListener listener if (!refreshing) { return; } - UpdateGraphProcessor.DEFAULT.addSource(this); + updateGraph.addSource(this); this.resultTable = listener.getTable(); listener.setIsRefreshing(true); } @@ -101,7 +100,7 @@ public final void setRecomputeListener(@NotNull final RecomputeListener listener @Override protected void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DynamicWhereFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DynamicWhereFilter.java index f47afd266ac..2dd4ff39b16 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DynamicWhereFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/DynamicWhereFilter.java @@ -10,7 +10,6 @@ import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.*; import io.deephaven.engine.table.impl.indexer.RowSetIndexer; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.engine.updategraph.DynamicNode; import io.deephaven.engine.table.impl.*; @@ -19,6 +18,7 @@ import io.deephaven.chunk.WritableBooleanChunk; import io.deephaven.chunk.WritableLongChunk; import io.deephaven.engine.table.impl.TupleSourceFactory; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.io.log.impl.LogOutputStringImpl; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import org.apache.commons.lang3.mutable.MutableBoolean; @@ -57,7 +57,7 @@ public class DynamicWhereFilter extends WhereFilterLivenessArtifactImpl implemen public DynamicWhereFilter(final QueryTable setTable, final boolean inclusion, final MatchPair... 
setColumnsNames) { setRefreshing = setTable.isRefreshing(); if (setRefreshing) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + updateGraph.checkInitiateSerialTableOperation(); } this.matchPairs = setColumnsNames; @@ -139,6 +139,11 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { } } + @Override + public UpdateGraph getUpdateGraph() { + return updateGraph; + } + private Object makeKey(long index) { return makeKey(setTupleSource, index); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/RollingReleaseFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/RollingReleaseFilter.java index d79880e4f0d..72f0992b79a 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/RollingReleaseFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/RollingReleaseFilter.java @@ -8,7 +8,6 @@ import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.rowset.RowSet; import java.util.Collections; @@ -79,7 +78,7 @@ public void setRecomputeListener(RecomputeListener listener) { Assert.eqNull(this.listener, "this.listener"); this.listener = listener; listener.setIsRefreshing(true); - UpdateGraphProcessor.DEFAULT.addSource(this); + updateGraph.addSource(this); } @Override @@ -101,7 +100,7 @@ public void run() { @Override protected void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/SimulationClock.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/SimulationClock.java index 69c4b1f86ad..0e1e3182ddd 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/SimulationClock.java 
+++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/SimulationClock.java @@ -6,8 +6,9 @@ import io.deephaven.base.clock.Clock; import io.deephaven.base.verify.Assert; import io.deephaven.base.verify.Require; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.util.annotations.VisibleForTesting; import org.jetbrains.annotations.NotNull; @@ -31,7 +32,9 @@ private enum State { } private final AtomicReference state = new AtomicReference<>(State.NOT_STARTED); - private final Condition ugpCondition = UpdateGraphProcessor.DEFAULT.exclusiveLock().newCondition(); + private final PeriodicUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + + private final Condition ugpCondition = updateGraph.exclusiveLock().newCondition(); private Instant now; @@ -107,12 +110,12 @@ public void start() { */ public void start(final boolean maxSpeed) { if (maxSpeed) { - UpdateGraphProcessor.DEFAULT.setTargetCycleDurationMillis(0); + updateGraph.setTargetCycleDurationMillis(0); } if (!state.compareAndSet(State.NOT_STARTED, State.STARTED)) { throw new IllegalStateException(this + " already started"); } - UpdateGraphProcessor.DEFAULT.addSource(refreshTask); + updateGraph.addSource(refreshTask); } /** @@ -124,8 +127,8 @@ public void advance() { if (DateTimeUtils.epochNanos(now) == DateTimeUtils.epochNanos(endTime)) { Assert.assertion(state.compareAndSet(State.STARTED, State.DONE), "state.compareAndSet(State.STARTED, State.DONE)"); - UpdateGraphProcessor.DEFAULT.removeSource(refreshTask); - UpdateGraphProcessor.DEFAULT.requestSignal(ugpCondition); + updateGraph.removeSource(refreshTask); + updateGraph.requestSignal(ugpCondition); return; // This return is not strictly necessary, but it seems clearer this way. 
} final Instant incremented = DateTimeUtils.plus(now, stepNanos); @@ -146,7 +149,7 @@ public boolean done() { */ public void awaitDoneUninterruptibly() { while (!done()) { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(ugpCondition::awaitUninterruptibly); + updateGraph.exclusiveLock().doLocked(ugpCondition::awaitUninterruptibly); } } @@ -155,7 +158,7 @@ public void awaitDoneUninterruptibly() { */ public void awaitDone() throws InterruptedException { while (!done()) { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(ugpCondition::await); + updateGraph.exclusiveLock().doLocked(ugpCondition::await); } } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/TimeSeriesFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/TimeSeriesFilter.java index f7c4a65d773..5f6052c93f1 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/TimeSeriesFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/TimeSeriesFilter.java @@ -2,10 +2,6 @@ * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending */ -/* - * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending - */ - package io.deephaven.engine.table.impl.select; import io.deephaven.base.clock.Clock; @@ -18,7 +14,6 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.ColumnSource; import java.time.Instant; @@ -99,7 +94,7 @@ public void setRecomputeListener(RecomputeListener listener) { Assert.eqNull(this.listener, "this.listener"); this.listener = listener; listener.setIsRefreshing(true); - UpdateGraphProcessor.DEFAULT.addSource(this); + updateGraph.addSource(this); } @Override @@ -120,6 +115,6 @@ public void run() { @Override protected void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + 
updateGraph.removeSource(this); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilter.java index a5ddcd8c591..5ad13e876ed 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilter.java @@ -126,7 +126,7 @@ default void validateSafeForRefresh(final BaseTable sourceTable) { * @param table the table to filter * @param usePrev true if previous values should be used. Implementing previous value filtering is optional, and a * {@link PreviousFilteringNotSupported} exception may be thrown. If a PreviousFiltering exception is thrown, - * then the caller must acquire the UpdateGraphProcessor lock. + * then the caller must acquire the PeriodicUpdateGraph lock. * * @return The subset of selection accepted by this filter; ownership passes to the caller */ @@ -157,7 +157,7 @@ default void validateSafeForRefresh(final BaseTable sourceTable) { * @param table the table to filter * @param usePrev true if previous values should be used. Implementing previous value filtering is optional, and a * {@link PreviousFilteringNotSupported} exception may be thrown. If a PreviousFiltering exception is thrown, - * then the caller must acquire the UpdateGraphProcessor lock. + * then the caller must acquire the PeriodicUpdateGraph lock. * * @return The subset of selection not accepted by this filter; ownership passes to the caller */ @@ -179,7 +179,7 @@ default WritableRowSet filterInverse(RowSet selection, RowSet fullSet, Table tab * @param table the table to filter * @param usePrev true if previous values should be used. Implementing previous value filtering is optional, and a * {@link PreviousFilteringNotSupported} exception may be thrown. If a PreviousFiltering exception is thrown, - * then the caller must acquire the UpdateGraphProcessor lock. 
+ * then the caller must acquire the PeriodicUpdateGraph lock. * @param invert if the filter should be inverted * @return The subset of selection; ownership passes to the caller */ diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilterLivenessArtifactImpl.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilterLivenessArtifactImpl.java index dcde36b4bf3..28f6ab86300 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilterLivenessArtifactImpl.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/WhereFilterLivenessArtifactImpl.java @@ -4,13 +4,22 @@ package io.deephaven.engine.table.impl.select; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.liveness.LivenessArtifact; import java.io.Serializable; public abstract class WhereFilterLivenessArtifactImpl extends LivenessArtifact implements WhereFilter, Serializable { + + protected final UpdateGraph updateGraph; + private boolean isAutomatedFilter = false; + public WhereFilterLivenessArtifactImpl() { + updateGraph = ExecutionContext.getContext().getUpdateGraph(); + } + @Override public boolean isAutomatedFilter() { return isAutomatedFilter; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectAndViewAnalyzer.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectAndViewAnalyzer.java index 3bdaa627f8a..eb34118632c 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectAndViewAnalyzer.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectAndViewAnalyzer.java @@ -24,6 +24,7 @@ import io.deephaven.engine.table.impl.util.JobScheduler; import io.deephaven.engine.table.impl.util.RowRedirection; import io.deephaven.engine.table.impl.util.WritableRowRedirection; +import 
io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.io.log.impl.LogOutputStringImpl; import io.deephaven.util.SafeCloseable; import io.deephaven.util.SafeCloseablePair; @@ -69,6 +70,7 @@ public static SelectAndViewAnalyzerWrapper create( boolean useShiftedColumns, final boolean allowInternalFlatten, final SelectColumn... selectColumns) { + final UpdateGraph updateGraph = sourceTable.getUpdateGraph(); SelectAndViewAnalyzer analyzer = createBaseLayer(columnSources, publishTheseSources); final Map> columnDefinitions = new LinkedHashMap<>(); final RowRedirection rowRedirection; @@ -187,9 +189,8 @@ public static SelectAndViewAnalyzerWrapper create( final WritableColumnSource scs = flatResult || flattenedResult ? sc.newFlatDestInstance(targetDestinationCapacity) : sc.newDestInstance(targetDestinationCapacity); - analyzer = - analyzer.createLayerForSelect(rowSet, sc.getName(), sc, scs, null, distinctDeps, mcsBuilder, - false, flattenedResult, flatResult && flattenedResult); + analyzer = analyzer.createLayerForSelect(updateGraph, rowSet, sc.getName(), sc, scs, null, + distinctDeps, mcsBuilder, false, flattenedResult, flatResult && flattenedResult); if (flattenedResult) { numberOfInternallyFlattenedColumns++; } @@ -199,9 +200,8 @@ public static SelectAndViewAnalyzerWrapper create( final WritableColumnSource underlyingSource = sc.newDestInstance(rowSet.size()); final WritableColumnSource scs = WritableRedirectedColumnSource.maybeRedirect( rowRedirection, underlyingSource, rowSet.size()); - analyzer = - analyzer.createLayerForSelect(rowSet, sc.getName(), sc, scs, underlyingSource, distinctDeps, - mcsBuilder, true, false, false); + analyzer = analyzer.createLayerForSelect(updateGraph, rowSet, sc.getName(), sc, scs, + underlyingSource, distinctDeps, mcsBuilder, true, false, false); break; } case SELECT_REDIRECTED_REFRESHING: @@ -216,9 +216,8 @@ public static SelectAndViewAnalyzerWrapper create( scs = WritableRedirectedColumnSource.maybeRedirect( rowRedirection, 
underlyingSource, rowSet.intSize()); } - analyzer = - analyzer.createLayerForSelect(rowSet, sc.getName(), sc, scs, underlyingSource, distinctDeps, - mcsBuilder, rowRedirection != null, false, false); + analyzer = analyzer.createLayerForSelect(updateGraph, rowSet, sc.getName(), sc, scs, + underlyingSource, distinctDeps, mcsBuilder, rowRedirection != null, false, false); break; } default: @@ -329,11 +328,12 @@ private StaticFlattenLayer createStaticFlattenLayer(TrackingRowSet parentRowSet) return new StaticFlattenLayer(this, parentRowSet); } - private SelectAndViewAnalyzer createLayerForSelect(RowSet parentRowset, String name, SelectColumn sc, - WritableColumnSource cs, WritableColumnSource underlyingSource, - String[] parentColumnDependencies, ModifiedColumnSet mcsBuilder, boolean isRedirected, - boolean flattenResult, boolean alreadyFlattened) { - return new SelectColumnLayer(parentRowset, this, name, sc, cs, underlyingSource, parentColumnDependencies, + private SelectAndViewAnalyzer createLayerForSelect( + UpdateGraph updateGraph, RowSet parentRowset, String name, SelectColumn sc, WritableColumnSource cs, + WritableColumnSource underlyingSource, String[] parentColumnDependencies, ModifiedColumnSet mcsBuilder, + boolean isRedirected, boolean flattenResult, boolean alreadyFlattened) { + return new SelectColumnLayer(updateGraph, parentRowset, this, name, sc, cs, underlyingSource, + parentColumnDependencies, mcsBuilder, isRedirected, flattenResult, alreadyFlattened); } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java index e2b5a540eeb..fc936a6e489 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/analyzers/SelectColumnLayer.java @@ -25,7 +25,7 @@ import 
io.deephaven.engine.table.impl.util.JobScheduler; import io.deephaven.engine.updategraph.DynamicNode; import io.deephaven.engine.updategraph.UpdateCommitterEx; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; @@ -41,7 +41,7 @@ final public class SelectColumnLayer extends SelectOrViewColumnLayer { /** - * The same reference as super.columnSource, but as a WritableColumnSource and maybe reinterpretted + * The same reference as super.columnSource, but as a WritableColumnSource and maybe reinterpreted */ private final WritableColumnSource writableSource; @@ -50,6 +50,7 @@ final public class SelectColumnLayer extends SelectOrViewColumnLayer { */ private final ExecutionContext executionContext; + private final UpdateGraph updateGraph; /** * Our parent row set, used for ensuring capacity. 
*/ @@ -71,15 +72,23 @@ final public class SelectColumnLayer extends SelectOrViewColumnLayer { */ private ChunkSource.WithPrev chunkSource; - SelectColumnLayer(RowSet parentRowSet, SelectAndViewAnalyzer inner, String name, SelectColumn sc, - WritableColumnSource ws, WritableColumnSource underlying, - String[] deps, ModifiedColumnSet mcsBuilder, boolean isRedirected, - boolean flattenedResult, boolean alreadyFlattenedSources) { + SelectColumnLayer( + UpdateGraph updateGraph, RowSet parentRowSet, SelectAndViewAnalyzer inner, String name, SelectColumn sc, + WritableColumnSource ws, WritableColumnSource underlying, String[] deps, ModifiedColumnSet mcsBuilder, + boolean isRedirected, boolean flattenedResult, boolean alreadyFlattenedSources) { super(inner, name, sc, ws, underlying, deps, mcsBuilder); + this.updateGraph = updateGraph; this.parentRowSet = parentRowSet; this.writableSource = ReinterpretUtils.maybeConvertToWritablePrimitive(ws); this.isRedirected = isRedirected; - this.executionContext = ExecutionContext.getContextToRecord(); + + final ExecutionContext userSuppliedContext = ExecutionContext.getContextToRecord(); + if (userSuppliedContext != null) { + this.executionContext = userSuppliedContext; + } else { + // the job scheduler requires the update graph + this.executionContext = ExecutionContext.newBuilder().setUpdateGraph(updateGraph).build(); + } dependencyBitSet = new BitSet(); Arrays.stream(deps).mapToInt(inner::getLayerIndexFor).forEach(dependencyBitSet::set); @@ -149,10 +158,9 @@ public void onAllRequiredColumnsCompleted() { // If we have shifts, that makes everything nasty; so we do not want to deal with it final boolean hasShifts = upstream.shifted().nonempty(); - final boolean checkTableOperations = - UpdateGraphProcessor.DEFAULT.getCheckTableOperations() - && !UpdateGraphProcessor.DEFAULT.sharedLock().isHeldByCurrentThread() - && !UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); + final boolean serialTableOperationsSafe = 
updateGraph.serialTableOperationsSafe() + || updateGraph.sharedLock().isHeldByCurrentThread() + || updateGraph.exclusiveLock().isHeldByCurrentThread(); if (canParallelizeThisColumn && jobScheduler.threadCount() > 1 && !hasShifts && ((resultTypeIsTable && totalSize > 0) @@ -196,13 +204,13 @@ public void onAllRequiredColumnsCompleted() { executionContext, () -> prepareParallelUpdate(jobScheduler, upstream, toClear, helper, liveResultOwner, onCompletion, this::onError, updates, - checkTableOperations), + serialTableOperationsSafe), SelectColumnLayer.this, this::onError); } else { jobScheduler.submit( executionContext, () -> doSerialApplyUpdate(upstream, toClear, helper, liveResultOwner, onCompletion, - checkTableOperations), + serialTableOperationsSafe), SelectColumnLayer.this, this::onError); } } @@ -212,7 +220,7 @@ public void onAllRequiredColumnsCompleted() { private void prepareParallelUpdate(final JobScheduler jobScheduler, final TableUpdate upstream, final RowSet toClear, final UpdateHelper helper, @Nullable final LivenessNode liveResultOwner, final SelectLayerCompletionHandler onCompletion, final Consumer onError, - final List splitUpdates, final boolean checkTableOperations) { + final List splitUpdates, final boolean serialTableOperationsSafe) { // we have to do removal and previous initialization before we can do any of the actual filling in multiple // threads to avoid concurrency problems with our destination column sources doEnsureCapacity(); @@ -234,9 +242,9 @@ private void prepareParallelUpdate(final JobScheduler jobScheduler, final TableU } } jobScheduler.iterateParallel( - executionContext, SelectColumnLayer.this, JobScheduler.DEFAULT_CONTEXT_FACTORY, 0, numTasks, - (ctx, ti, nec) -> doParallelApplyUpdate( - splitUpdates.get(ti), helper, liveResultOwner, checkTableOperations, destinationOffsets[ti]), + executionContext, SelectColumnLayer.this, JobScheduler.DEFAULT_CONTEXT_FACTORY, 0, + numTasks, (ctx, ti, nec) -> 
doParallelApplyUpdate(splitUpdates.get(ti), helper, liveResultOwner, + serialTableOperationsSafe, destinationOffsets[ti]), () -> { if (!isRedirected) { clearObjectsAtThisLevel(toClear); @@ -248,14 +256,14 @@ private void prepareParallelUpdate(final JobScheduler jobScheduler, final TableU private void doSerialApplyUpdate(final TableUpdate upstream, final RowSet toClear, final UpdateHelper helper, @Nullable final LivenessNode liveResultOwner, final SelectLayerCompletionHandler onCompletion, - final boolean checkTableOperations) { + final boolean serialTableOperationsSafe) { doEnsureCapacity(); - final boolean oldCheck = UpdateGraphProcessor.DEFAULT.setCheckTableOperations(checkTableOperations); + final boolean oldSafe = updateGraph.setSerialTableOperationsSafe(serialTableOperationsSafe); try { SystemicObjectTracker.executeSystemically(isSystemic, () -> doApplyUpdate(upstream, helper, liveResultOwner, 0)); } finally { - UpdateGraphProcessor.DEFAULT.setCheckTableOperations(oldCheck); + updateGraph.setSerialTableOperationsSafe(oldSafe); } if (!isRedirected) { clearObjectsAtThisLevel(toClear); @@ -264,13 +272,14 @@ private void doSerialApplyUpdate(final TableUpdate upstream, final RowSet toClea } private void doParallelApplyUpdate(final TableUpdate upstream, final UpdateHelper helper, - @Nullable final LivenessNode liveResultOwner, final boolean checkTableOperations, final long startOffset) { - final boolean oldCheck = UpdateGraphProcessor.DEFAULT.setCheckTableOperations(checkTableOperations); + @Nullable final LivenessNode liveResultOwner, final boolean serialTableOperationsSafe, + final long startOffset) { + final boolean oldSafe = updateGraph.setSerialTableOperationsSafe(serialTableOperationsSafe); try { SystemicObjectTracker.executeSystemically(isSystemic, () -> doApplyUpdate(upstream, helper, liveResultOwner, startOffset)); } finally { - UpdateGraphProcessor.DEFAULT.setCheckTableOperations(oldCheck); + updateGraph.setSerialTableOperationsSafe(oldSafe); } 
upstream.release(); } @@ -517,7 +526,7 @@ private synchronized void addToPrevUnmanager( @NotNull final LivenessNode liveResultOwner, @NotNull final WritableObjectChunk prevValuesToUnmanage) { if (prevUnmanager == null) { - prevUnmanager = new UpdateCommitterEx<>(this, SelectColumnLayer::unmanagePreviousValues); + prevUnmanager = new UpdateCommitterEx<>(this, updateGraph, SelectColumnLayer::unmanagePreviousValues); } prevUnmanager.maybeActivate(liveResultOwner); if (prevValueChunksToUnmanage == null) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/formula/FormulaFactory.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/formula/FormulaFactory.java index 8ca2cecb9b3..8712bf97279 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/select/formula/FormulaFactory.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/select/formula/FormulaFactory.java @@ -7,7 +7,6 @@ import io.deephaven.engine.table.impl.select.Formula; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.rowset.TrackingRowSet; -import io.deephaven.engine.context.ExecutionContext; import java.util.Map; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java index 8f294d3d49d..b0cb7a9e796 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ArraySourceHelper.java @@ -8,6 +8,7 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; @@ -142,7 +143,7 @@ final boolean shouldRecordPrevious(final long 
key, final UArray[] prevBlocks, if (prevFlusher == null) { return false; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. prevFlusher.maybeActivate(); final int block = (int) (key >> LOG_BLOCK_SIZE); @@ -184,7 +185,8 @@ final void startTrackingPrev(int numBlocks) { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, ArraySourceHelper::commitBlocks); + prevFlusher = new UpdateCommitter<>(this, updateGraph, + ArraySourceHelper::commitBlocks); prevInUse = new long[numBlocks][]; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java index 1841e9a4516..9618ebdc540 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanArraySource.java @@ -4,6 +4,7 @@ package io.deephaven.engine.table.impl.sources; import gnu.trove.list.array.TIntArrayList; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; @@ -11,7 +12,6 @@ import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.BooleanUtils; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.RowKeys; @@ -58,7 +58,7 @@ public void ensureCapacity(long capacity, boolean 
nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedIndices) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -408,7 +408,8 @@ private interface Reader { private void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src, Reader reader) { final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -458,7 +459,8 @@ private void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk src, Reader reader) { final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -504,7 +506,7 @@ private void fillFromChunkUnordered(@NotNull Chunk src, @NotNu if (keys.size() == 0) { return; } - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSingleValueSource.java index 2769eeb93ad..d763de7cea6 100644 --- 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSingleValueSource.java @@ -13,8 +13,8 @@ import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.Chunk; import io.deephaven.chunk.LongChunk; @@ -47,7 +47,7 @@ public BooleanSingleValueSource() { @Override public final void set(Boolean value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -82,7 +82,7 @@ public final Boolean getPrev(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_BOOLEAN; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java index 366f3874186..4366dbfcff3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/BooleanSparseArraySource.java @@ -14,6 +14,8 @@ import static io.deephaven.util.BooleanUtils.NULL_BOOLEAN_AS_BYTE; import io.deephaven.engine.table.WritableSourceWithPrepareForParallelPopulation; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import 
io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -21,9 +23,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.ByteOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -275,7 +275,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. - return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -284,7 +284,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, BooleanSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, BooleanSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -399,7 +399,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. 
prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -423,7 +423,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -750,7 +750,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -946,7 +946,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final ObjectChunk chunk = src.asObjectChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java index 38073c6b877..8742a5c7429 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteArraySource.java @@ -12,6 +12,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; @@ -19,7 +20,6 @@ import io.deephaven.engine.table.ChunkSource; import 
io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.ByteComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -73,7 +73,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -569,7 +569,8 @@ private interface CopyFromBlockFunctor { // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -653,7 +654,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -706,7 +708,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk chunk = src.asByteChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if 
(trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSingleValueSource.java index 7a5c4adb0b4..336af6727b0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSingleValueSource.java @@ -11,8 +11,8 @@ import io.deephaven.chunk.WritableByteChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.ByteChunk; import io.deephaven.chunk.Chunk; @@ -47,7 +47,7 @@ public ByteSingleValueSource() { @Override public final void set(Byte value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -60,7 +60,7 @@ public final void set(Byte value) { @Override public final void set(byte value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -93,7 +93,7 @@ public final byte getPrevByte(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_BYTE; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java index f61d905ef53..092d7aea366 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ByteSparseArraySource.java @@ -8,6 +8,8 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -15,9 +17,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.ByteOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -269,7 +269,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. 
- return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -278,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, ByteSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, ByteSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -393,7 +393,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -417,7 +417,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -740,7 +740,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -936,7 +936,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final ByteChunk chunk = src.asByteChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = 
shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java index 26600c9e970..ade3bae143b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterArraySource.java @@ -7,6 +7,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; @@ -14,7 +15,6 @@ import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.CharComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -68,7 +68,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -564,7 +564,8 @@ private interface CopyFromBlockFunctor { // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != 
null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -648,7 +649,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -701,7 +703,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk chunk = src.asCharChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSingleValueSource.java index f840581c4e8..75b1af8ccae 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSingleValueSource.java @@ -6,8 +6,8 @@ import io.deephaven.chunk.WritableCharChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.CharChunk; import io.deephaven.chunk.Chunk; @@ -42,7 +42,7 @@ public CharacterSingleValueSource() { @Override public final void set(Character value) { 
if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -55,7 +55,7 @@ public final void set(Character value) { @Override public final void set(char value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -88,7 +88,7 @@ public final char getPrevChar(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_CHAR; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java index 2acf8a765c2..84ba20330b6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/CharacterSparseArraySource.java @@ -3,6 +3,8 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -10,9 +12,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import 
io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.CharOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -264,7 +264,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. - return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -273,7 +273,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, CharacterSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, CharacterSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -388,7 +388,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. 
prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -412,7 +412,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -735,7 +735,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -931,7 +931,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final CharChunk chunk = src.asCharChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java index 2ad909f83ef..7f042028274 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleArraySource.java @@ -12,6 +12,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; @@ -19,7 +20,6 @@ import io.deephaven.engine.table.ChunkSource; import 
io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.DoubleComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -73,7 +73,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -569,7 +569,8 @@ private interface CopyFromBlockFunctor { // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -653,7 +654,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -706,7 +708,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk chunk = src.asDoubleChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if 
(trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSingleValueSource.java index 92d222ba809..8757058f566 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSingleValueSource.java @@ -11,8 +11,8 @@ import io.deephaven.chunk.WritableDoubleChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.DoubleChunk; import io.deephaven.chunk.Chunk; @@ -47,7 +47,7 @@ public DoubleSingleValueSource() { @Override public final void set(Double value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -60,7 +60,7 @@ public final void set(Double value) { @Override public final void set(double value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -93,7 +93,7 @@ public final double getPrevDouble(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_DOUBLE; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java index 3fb958820ab..141d4013f8b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/DoubleSparseArraySource.java @@ -8,6 +8,8 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -15,9 +17,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.DoubleOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -269,7 +269,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. 
- return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -278,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, DoubleSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, DoubleSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -393,7 +393,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -417,7 +417,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -740,7 +740,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -936,7 +936,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final DoubleChunk chunk = src.asDoubleChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = 
shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java index e9d8a04ada6..3c7845aa663 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatArraySource.java @@ -12,6 +12,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; @@ -19,7 +20,6 @@ import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.FloatComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -73,7 +73,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -569,7 +569,8 @@ private interface CopyFromBlockFunctor { // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + 
ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -653,7 +654,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -706,7 +708,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk chunk = src.asFloatChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSingleValueSource.java index 3470f26096e..14768822f07 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSingleValueSource.java @@ -11,8 +11,8 @@ import io.deephaven.chunk.WritableFloatChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.FloatChunk; import io.deephaven.chunk.Chunk; @@ -47,7 +47,7 @@ public FloatSingleValueSource() { @Override public final void set(Float value) { if (isTrackingPrevValues) { - 
final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -60,7 +60,7 @@ public final void set(Float value) { @Override public final void set(float value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -93,7 +93,7 @@ public final float getPrevFloat(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_FLOAT; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java index c18e3fbda3d..6dcbd8e439b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/FloatSparseArraySource.java @@ -8,6 +8,8 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -15,9 +17,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import 
io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.FloatOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -269,7 +269,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. - return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -278,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, FloatSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, FloatSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -393,7 +393,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. 
prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -417,7 +417,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -740,7 +740,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -936,7 +936,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final FloatChunk chunk = src.asFloatChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java index e573666459d..b0d678656a3 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerArraySource.java @@ -12,6 +12,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; @@ -19,7 +20,6 @@ import io.deephaven.engine.table.ChunkSource; 
import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.IntComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -73,7 +73,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -569,7 +569,8 @@ private interface CopyFromBlockFunctor { // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -653,7 +654,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -706,7 +708,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk chunk = src.asIntChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if 
(trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSingleValueSource.java index 089ade8cbb1..8beb01857da 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSingleValueSource.java @@ -11,8 +11,8 @@ import io.deephaven.chunk.WritableIntChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.IntChunk; import io.deephaven.chunk.Chunk; @@ -47,7 +47,7 @@ public IntegerSingleValueSource() { @Override public final void set(Integer value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -60,7 +60,7 @@ public final void set(Integer value) { @Override public final void set(int value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -93,7 +93,7 @@ public final int getPrevInt(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_INT; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java index 24112b636b7..9811faed021 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/IntegerSparseArraySource.java @@ -8,6 +8,8 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -15,9 +17,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.IntOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -269,7 +269,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. 
- return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -278,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, IntegerSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, IntegerSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -393,7 +393,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -417,7 +417,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -740,7 +740,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -936,7 +936,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final IntChunk chunk = src.asIntChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = 
shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java index 2d57cfd8cb7..71fbd6f3e73 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongArraySource.java @@ -23,6 +23,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; @@ -30,7 +31,6 @@ import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.LongComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -84,7 +84,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -863,7 +863,8 @@ protected void fillSparsePrevChunkUnordered( // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + 
ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -921,7 +922,8 @@ void fillFromChunkByRanges( // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -1007,7 +1009,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -1057,7 +1060,8 @@ void fillFromChunkByKeys( // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -1110,7 +1114,8 @@ void fillFromChunkByKeys( final LongChunk chunk = src.asLongChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -1157,7 +1162,8 @@ public void fillFromChunkUnordered( final ObjectChunk chunk = src.asObjectChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != 
LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSingleValueSource.java index 73bda8c4713..a88f8ae5392 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSingleValueSource.java @@ -11,8 +11,8 @@ import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.Chunk; @@ -47,7 +47,7 @@ public LongSingleValueSource() { @Override public final void set(Long value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -60,7 +60,7 @@ public final void set(Long value) { @Override public final void set(long value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -93,7 +93,7 @@ public final long getPrevLong(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_LONG; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime 
< updateGraph.clock().currentStep()) { return current; } return prev; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java index 91427432edf..6861f57f143 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/LongSparseArraySource.java @@ -17,6 +17,8 @@ import io.deephaven.base.verify.Require; import java.time.ZoneId; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -24,9 +26,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; import io.deephaven.engine.rowset.RowSequence; @@ -277,7 +277,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. 
- return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -286,7 +286,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, LongSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, LongSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -401,7 +401,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -425,7 +425,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -977,7 +977,7 @@ void fillFromChunkByRanges( // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -1039,7 +1039,7 @@ void fillFromChunkByKeys( // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { 
prevFlusher.maybeActivate(); @@ -1235,7 +1235,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final LongChunk chunk = src.asLongChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -1294,7 +1294,7 @@ public void fillFromChunkUnordered( final ObjectChunk chunk = src.asObjectChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectArraySource.java index 521fecefae6..b42c4592a1e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectArraySource.java @@ -5,10 +5,10 @@ import gnu.trove.list.array.TIntArrayList; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.vector.Vector; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.RowKeys; @@ -65,7 +65,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedIndices) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one 
clock cycle!"); } @@ -341,7 +341,7 @@ void fillFromChunkByRanges(@NotNull RowSequence rowSequence, Chunk chunk = src.asObjectChunk(); final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -416,7 +416,7 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk chunk = src.asObjectChunk(); final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -456,7 +456,7 @@ void fillFromChunkByKeys(@NotNull RowSequence rowSequence, Chunk src, @NotNull LongChunk keys) { final ObjectChunk chunk = src.asObjectChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSingleValueSource.java index d661826fb14..c8465fa6ef0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSingleValueSource.java @@ -11,8 +11,8 @@ import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import 
io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.Chunk; @@ -46,7 +46,7 @@ public ObjectSingleValueSource(Class type) { @Override public final void set(T value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -81,7 +81,7 @@ public final T getPrev(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return null; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java index 9a47d2452b2..877f2fd997f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ObjectSparseArraySource.java @@ -8,6 +8,8 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -15,9 +17,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import 
io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.ObjectOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -256,7 +256,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. - return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -265,7 +265,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, ObjectSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, ObjectSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -380,7 +380,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. 
prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -404,7 +404,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -727,7 +727,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -923,7 +923,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final ObjectChunk chunk = src.asObjectChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java index 16b3918181c..0ad59c51294 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortArraySource.java @@ -12,6 +12,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; @@ -19,7 +20,6 @@ import io.deephaven.engine.table.ChunkSource; import 
io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.util.SoftRecycler; import io.deephaven.util.compare.ShortComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -73,7 +73,7 @@ public void ensureCapacity(long capacity, boolean nullFill) { */ @Override public void prepareForParallelPopulation(RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (ensurePreviousClockCycle == currentStep) { throw new IllegalStateException("May not call ensurePrevious twice on one clock cycle!"); } @@ -569,7 +569,8 @@ private interface CopyFromBlockFunctor { // endregion chunkDecl final LongChunk ranges = rowSequence.asRowKeyRangesChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -653,7 +654,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -706,7 +708,8 @@ public void copyFromChunk(long firstKey, final long totalLength, final Chunk chunk = src.asShortChunk(); // endregion chunkDecl - final boolean trackPrevious = prevFlusher != null && ensurePreviousClockCycle != LogicalClock.DEFAULT.currentStep(); + final boolean trackPrevious = prevFlusher != null && + ensurePreviousClockCycle != updateGraph.clock().currentStep(); if 
(trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSingleValueSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSingleValueSource.java index 7222ddb43cd..e0f21ee2113 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSingleValueSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSingleValueSource.java @@ -11,8 +11,8 @@ import io.deephaven.chunk.WritableShortChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.chunk.ShortChunk; import io.deephaven.chunk.Chunk; @@ -47,7 +47,7 @@ public ShortSingleValueSource() { @Override public final void set(Short value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -60,7 +60,7 @@ public final void set(Short value) { @Override public final void set(short value) { if (isTrackingPrevValues) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (changeTime < currentStep) { prev = current; changeTime = currentStep; @@ -93,7 +93,7 @@ public final short getPrevShort(long rowKey) { if (rowKey == RowSequence.NULL_ROW_KEY) { return NULL_SHORT; } - if (!isTrackingPrevValues || changeTime < LogicalClock.DEFAULT.currentStep()) { + if (!isTrackingPrevValues || changeTime < updateGraph.clock().currentStep()) { return current; } return prev; diff --git 
a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java index 9674704fb6c..1fe2101b07d 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ShortSparseArraySource.java @@ -8,6 +8,8 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; @@ -15,9 +17,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.engine.table.impl.sources.sparse.ShortOneOrN; import io.deephaven.engine.table.impl.sources.sparse.LongOneOrN; @@ -269,7 +269,7 @@ private boolean shouldTrackPrevious() { // prevFlusher == null means we are not tracking previous values yet (or maybe ever). // If prepareForParallelPopulation was called on this cycle, it's assumed that all previous values have already // been recorded. 
- return prevFlusher != null && prepareForParallelPopulationClockCycle != LogicalClock.DEFAULT.currentStep(); + return prevFlusher != null && prepareForParallelPopulationClockCycle != updateGraph.clock().currentStep(); } @Override @@ -278,7 +278,7 @@ public void startTrackingPrevValues() { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + this.getClass().getCanonicalName()); } - prevFlusher = new UpdateCommitter<>(this, ShortSparseArraySource::commitUpdates); + prevFlusher = new UpdateCommitter<>(this, updateGraph, ShortSparseArraySource::commitUpdates); } private void commitUpdates() { @@ -393,7 +393,7 @@ private void commitUpdates() { if (!shouldTrackPrevious()) { return null; } - // If we want to track previous values, we make sure we are registered with the UpdateGraphProcessor. + // If we want to track previous values, we make sure we are registered with the PeriodicUpdateGraph. prevFlusher.maybeActivate(); final int block0 = (int) (key >> BLOCK0_SHIFT) & BLOCK0_MASK; @@ -417,7 +417,7 @@ private void commitUpdates() { @Override public void prepareForParallelPopulation(final RowSequence changedRows) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (prepareForParallelPopulationClockCycle == currentStep) { throw new IllegalStateException("May not call prepareForParallelPopulation twice on one clock cycle!"); } @@ -740,7 +740,7 @@ private boolean shouldUsePrevious(final long rowKey) { // endregion chunkDecl final LongChunk keys = rowSequence.asRowKeyChunk(); - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); @@ -936,7 +936,7 @@ void nullByKeys(@NotNull final RowSequence rowSequence) { final ShortChunk chunk = src.asShortChunk(); // endregion chunkDecl - final boolean trackPrevious = shouldTrackPrevious();; + final boolean trackPrevious = 
shouldTrackPrevious(); if (trackPrevious) { prevFlusher.maybeActivate(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SwitchColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SwitchColumnSource.java index 3e329e52940..6fc22a47a9f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SwitchColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/SwitchColumnSource.java @@ -6,13 +6,13 @@ import io.deephaven.base.verify.Assert; import io.deephaven.base.verify.Require; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Context; import io.deephaven.engine.table.SharedContext; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.AbstractColumnSource; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; @@ -36,7 +36,7 @@ public SwitchColumnSource(ColumnSource currentSource) { public SwitchColumnSource(@NotNull final ColumnSource currentSource, @Nullable final Consumer> onPreviousCommitted) { super(currentSource.getType(), currentSource.getComponentType()); - this.updateCommitter = new UpdateCommitter<>(this, SwitchColumnSource::clearPrevious); + this.updateCommitter = new UpdateCommitter<>(this, updateGraph, SwitchColumnSource::clearPrevious); this.onPreviousCommitted = onPreviousCommitted; this.currentSource = currentSource; } @@ -55,7 +55,7 @@ public void setNewCurrent(ColumnSource newCurrent) { Assert.eq(newCurrent.getComponentType(), "newCurrent.getComponentType()", getComponentType(), "getComponentType()"); prevSource = currentSource; - prevValidityStep = LogicalClock.DEFAULT.currentStep(); + prevValidityStep = 
updateGraph.clock().currentStep(); currentSource = newCurrent; updateCommitter.maybeActivate(); } @@ -293,9 +293,8 @@ public short getPrevShort(final long rowKey) { return prevSource.getPrevShort(rowKey); } - private boolean prevInvalid() { - return prevValidityStep == -1 || prevValidityStep != LogicalClock.DEFAULT.currentStep(); + return prevValidityStep == -1 || prevValidityStep != updateGraph.clock().currentStep(); } @Override diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UngroupedColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UngroupedColumnSource.java index 678f2f6da7b..3d819f4ff34 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UngroupedColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UngroupedColumnSource.java @@ -3,15 +3,15 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.AbstractColumnSource; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.vector.ObjectVector; import io.deephaven.vector.Vector; import io.deephaven.engine.table.impl.sources.aggregate.AggregateColumnSource; public abstract class UngroupedColumnSource extends AbstractColumnSource { - long lastPreviousClockTick = LogicalClock.DEFAULT.currentStep(); + long lastPreviousClockTick = updateGraph.clock().currentStep(); public void initializeBase(long base) { this.prevBase = base; @@ -24,7 +24,7 @@ public void startTrackingPrevValues() { } public void setBase(long base) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (lastPreviousClockTick != currentStep) { prevBase = this.base; lastPreviousClockTick = currentStep; @@ -33,7 +33,7 @@ public void setBase(long base) { } public long getPrevBase() { - if 
(lastPreviousClockTick == LogicalClock.DEFAULT.currentStep()) { + if (lastPreviousClockTick == updateGraph.clock().currentStep()) { return prevBase; } else { return base; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnionSourceManager.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnionSourceManager.java index 562ffc3b7d3..dd4559f34f0 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnionSourceManager.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/UnionSourceManager.java @@ -96,7 +96,8 @@ public UnionSourceManager(@NotNull final PartitionedTable partitionedTable) { coalescedPartitions.addUpdateListener(constituentChangesListener); listenerRecorders.offer(constituentChangesListener); - updateCommitter = new UpdateCommitter<>(this, usm -> usm.unionRedirection.copyCurrToPrev()); + updateCommitter = new UpdateCommitter<>(this, partitionedTable.table().getUpdateGraph(), + usm -> usm.unionRedirection.copyCurrToPrev()); } else { listenerRecorders = null; mergedListener = null; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/ChunkAdapter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/ChunkAdapter.java index 7c93bc80d4d..0eb5198b39f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/ChunkAdapter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/ChunkAdapter.java @@ -8,10 +8,11 @@ import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ChunkSink; import io.deephaven.chunk.*; +import io.deephaven.util.SafeCloseable; -public class ChunkAdapter { - public static ChunkAdapter create(Class type, final ChunkSink baseline, - final ChunkSink delta) { +public class ChunkAdapter implements SafeCloseable { + + public static ChunkAdapter create(Class type, final ChunkSink baseline, final 
ChunkSink delta) { // noinspection unchecked return type == Boolean.class ? (ChunkAdapter) new BooleanChunkAdapter(baseline, delta) : new ChunkAdapter<>(baseline, delta); @@ -168,6 +169,12 @@ private void finishSet(final long index) { delta.fillFromChunk(deltaFillFromContext, baseChunk, soleKey); } + @Override + public void close() { + final SafeCloseable deltaContextToClose = deltaContext == baselineContext ? null : deltaContext; + SafeCloseable.closeAll(baselineContext, deltaContextToClose, deltaFillFromContext, soleKey, baseChunk); + } + private static class BooleanChunkAdapter extends ChunkAdapter { BooleanChunkAdapter(ChunkSink baseline, ChunkSink delta) { super(baseline, delta); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java index 46c88351884..b96a16fa443 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/DeltaAwareColumnSource.java @@ -16,7 +16,9 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; // This worked-out example is a sketch of the problem we are trying to solve. 
// @@ -262,30 +264,33 @@ private Chunk getOrFillChunk(@NotNull DAContext context, Writabl // deltaKeysDS: the above, translated to the delta coordinate space final RowSet[] splitResult = new RowSet[2]; splitKeys(rowSequence, dRows, splitResult); - final RowSet baselineKeysBS = splitResult[1]; - final RowSet deltaKeysBS = splitResult[0]; + try (final RowSet baselineKeysBS = splitResult[1]; final RowSet deltaKeysBS = splitResult[0]) { - // If one or the other is empty, shortcut here - if (deltaKeysBS.isEmpty()) { - // By the way, baselineKeysBS equals rowSequence, so you could pick either one - return getOrFillSimple(baseline, context.baseline, optionalDest, baselineKeysBS); - } + // If one or the other is empty, shortcut here + if (deltaKeysBS.isEmpty()) { + // By the way, baselineKeysBS equals rowSequence, so you could pick either one + return getOrFillSimple(baseline, context.baseline, optionalDest, baselineKeysBS); + } - final RowSet deltaKeysDS = dRows.invert(deltaKeysBS); - if (baselineKeysBS.isEmpty()) { - return getOrFillSimple(delta, context.delta, optionalDest, deltaKeysDS); + try (final RowSet deltaKeysDS = dRows.invert(deltaKeysBS)) { + if (baselineKeysBS.isEmpty()) { + return getOrFillSimple(delta, context.delta, optionalDest, deltaKeysDS); + } + + // Always use "get" to pull in the baseline and delta pieces + final Chunk bChunk = baseline.getChunk(context.baseline.getContext, baselineKeysBS); + final Chunk dChunk = delta.getChunk(context.delta.getContext, deltaKeysDS); + // Merge them into either the user-provided chunk, or our own preallocated chunk. Note that 'destToUse' + // will always be non-null. This is because if we arrived here from fillChunk(), then optionalDest will + // be non-null. Otherwise (if we arrived here from getChunk()), then optionalDest will be null, but + // context.optionalChunk will be non-null (having been created through makeGetContext()). + final WritableChunk destToUse = optionalDest != null + ? 
optionalDest + : context.optionalChunk; + ChunkMerger.merge(bChunk, dChunk, baselineKeysBS, deltaKeysBS, destToUse); + return destToUse; + } } - - // Always use "get" to pull in the baseline and delta pieces - final Chunk bChunk = baseline.getChunk(context.baseline.getContext, baselineKeysBS); - final Chunk dChunk = delta.getChunk(context.delta.getContext, deltaKeysDS); - // Merge them into either the user-provided chunk, or our own preallocated chunk. Note that 'destToUse' will - // always be non-null. This is because if we arrived here from fillChunk(), then optionalDest will be non-null. - // Otherwise (if we arrived here from getChunk()), then optionalDest will be null, but context.optionalChunk - // will be non-null (having been created through makeGetContext()). - final WritableChunk destToUse = optionalDest != null ? optionalDest : context.optionalChunk; - ChunkMerger.merge(bChunk, dChunk, baselineKeysBS, deltaKeysBS, destToUse); - return destToUse; } @SuppressWarnings({"rawtypes", "unchecked"}) @@ -611,8 +616,10 @@ public void startTrackingPrevValues() { * twice during the lifetime of a given DeltaAwareColumnSource: once at construction and once at the time of * startTrackingPrevValues(). 
*/ - chunkAdapter = ThreadLocal.withInitial(() -> ChunkAdapter.create(getType(), baseline, delta)); - updateCommitter = new UpdateCommitter<>(this, DeltaAwareColumnSource::commitValues); + try (final SafeCloseable ignored = chunkAdapter.get()) { + chunkAdapter = ThreadLocal.withInitial(() -> ChunkAdapter.create(getType(), baseline, delta)); + } + updateCommitter = new UpdateCommitter<>(this, updateGraph, DeltaAwareColumnSource::commitValues); } @Override @@ -625,6 +632,14 @@ public boolean isImmutable() { return false; } + @Override + public void releaseCachedResources() { + super.releaseCachedResources(); + try (final SafeCloseable ignored = chunkAdapter.get()) { + chunkAdapter.remove(); + } + } + /** * Partitions {@code lhs} into two indices: (lhs intersect rhs) and (lhs minus rhs). * @@ -661,14 +676,26 @@ static DAContext createForFill(ChunkSource baseline, ChunkSource delta, int chun */ final WritableChunk optionalChunk; - private DAContext(GetAndFillContexts baseline, GetAndFillContexts delta, WritableChunk optionalChunk) { + private DAContext( + @NotNull final GetAndFillContexts baseline, + @NotNull final GetAndFillContexts delta, + @Nullable final WritableChunk optionalChunk) { this.baseline = baseline; this.delta = delta; this.optionalChunk = optionalChunk; } + + @Override + public void close() { + baseline.close(); + delta.close(); + if (optionalChunk != null) { + optionalChunk.close(); + } + } } - private static class GetAndFillContexts { + private static class GetAndFillContexts implements SafeCloseable { @SuppressWarnings("rawtypes") static GetAndFillContexts createForGet(ChunkSource chunkSource, int chunkCapacity) { return new GetAndFillContexts(chunkSource.makeGetContext(chunkCapacity), null); @@ -689,9 +716,19 @@ static GetAndFillContexts createForFill(ChunkSource chunkSource, int chunkCapaci */ final ChunkSource.FillContext optionalFillContext; - private GetAndFillContexts(ChunkSource.GetContext getContext, ChunkSource.FillContext 
optionalFillContext) { + private GetAndFillContexts( + @NotNull final ChunkSource.GetContext getContext, + @Nullable final ChunkSource.FillContext optionalFillContext) { this.getContext = getContext; this.optionalFillContext = optionalFillContext; } + + @Override + public void close() { + getContext.close(); + if (optionalFillContext != null) { + optionalFillContext.close(); + } + } } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/SoleKey.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/SoleKey.java index 43863bd842f..084d18c9f29 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/SoleKey.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/deltaaware/SoleKey.java @@ -8,6 +8,7 @@ import io.deephaven.engine.rowset.RowSequenceFactory; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; +import io.deephaven.util.SafeCloseable; import io.deephaven.util.datastructures.LongAbortableConsumer; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.WritableLongChunk; @@ -116,6 +117,11 @@ public boolean forEachRowKeyRange(LongRangeAbortableConsumer larc) { return larc.accept(key, key); } + @Override + public void close() { + SafeCloseable.closeAll(keyIndicesChunk, keyRangesChunk); + } + static class SoleKeyIterator implements Iterator { private final long key; private boolean hasMore; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/AddsToRingsListener.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/AddsToRingsListener.java index 6add7cb28b6..0b6b13d9d26 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/AddsToRingsListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/sources/ring/AddsToRingsListener.java @@ -4,6 +4,7 @@ 
package io.deephaven.engine.table.impl.sources.ring; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.WritableRowSet; @@ -142,7 +143,8 @@ private AddsToRingsListener( throw new IllegalArgumentException(); } } - prevFlusher = new UpdateCommitter<>(this, AddsToRingsListener::bringPreviousUpToDate); + prevFlusher = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), + AddsToRingsListener::bringPreviousUpToDate); } private WritableRowSet resultRowSet() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/updateby/UpdateBy.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/updateby/UpdateBy.java index 1892929803c..0fe2f747bf6 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/updateby/UpdateBy.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/updateby/UpdateBy.java @@ -28,10 +28,8 @@ import io.deephaven.engine.table.impl.sources.*; import io.deephaven.engine.table.impl.sources.sparse.SparseConstants; import io.deephaven.engine.table.impl.util.*; -import io.deephaven.engine.updategraph.DynamicNode; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.TerminalNotification; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.*; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.util.SafeCloseable; import io.deephaven.util.SafeCloseableArray; @@ -255,6 +253,7 @@ class PhasedUpdateProcessor implements LogOutputAppendable { /** For cacheable sources, track how many windows require this source */ final AtomicIntegerArray inputSourceReferenceCounts; final JobScheduler jobScheduler; + final ExecutionContext 
executionContext; final CompletableFuture waitForResult; /*** @@ -305,6 +304,9 @@ class PhasedUpdateProcessor implements LogOutputAppendable { } else { jobScheduler = ImmediateJobScheduler.INSTANCE; } + executionContext = ExecutionContext.newBuilder() + .captureUpdateGraph() + .markSystemic().build(); waitForResult = new CompletableFuture<>(); } else { // Determine which windows need to be computed. @@ -328,11 +330,14 @@ class PhasedUpdateProcessor implements LogOutputAppendable { } } // Create the proper JobScheduler for the following parallel tasks - if (UpdateGraphProcessor.DEFAULT.getUpdateThreads() > 1) { - jobScheduler = new UpdateGraphProcessorJobScheduler(); + if (source.getUpdateGraph().parallelismFactor() > 1) { + jobScheduler = new UpdateGraphJobScheduler(source.getUpdateGraph()); } else { jobScheduler = ImmediateJobScheduler.INSTANCE; } + executionContext = ExecutionContext.newBuilder() + .setUpdateGraph(result().getUpdateGraph()) + .markSystemic().build(); waitForResult = null; } } @@ -476,7 +481,7 @@ private void computeCachedColumnRowSets(final Runnable onComputeComplete) { final int[] dirtyWindowIndices = dirtyWindows.stream().toArray(); - jobScheduler.iterateParallel(ExecutionContext.getContextToRecord(), + jobScheduler.iterateParallel(executionContext, chainAppendables(this, stringToAppendable("-computeCachedColumnRowSets")), JobScheduler.DEFAULT_CONTEXT_FACTORY, 0, cacheableSourceIndices.length, (context, idx, nec) -> { @@ -528,7 +533,7 @@ private void computeCachedColumnRowSets(final Runnable onComputeComplete) { */ private void prepareForParallelPopulation( final Runnable onParallelPopulationComplete) { - jobScheduler.iterateParallel(ExecutionContext.getContextToRecord(), + jobScheduler.iterateParallel(executionContext, chainAppendables(this, stringToAppendable("-prepareForParallelPopulation")), JobScheduler.DEFAULT_CONTEXT_FACTORY, 0, windows.length, @@ -588,7 +593,7 @@ private void processWindows(final Runnable onWindowsComplete) { final 
int[] dirtyWindowIndices = dirtyWindows.stream().toArray(); - jobScheduler.iterateSerial(ExecutionContext.getContextToRecord(), + jobScheduler.iterateSerial(executionContext, chainAppendables(this, stringToAppendable("-processWindows")), JobScheduler.DEFAULT_CONTEXT_FACTORY, 0, dirtyWindowIndices.length, @@ -675,7 +680,7 @@ private void processWindowOperators( operatorSets.add(opList.toArray()); // Process each set of similar operators in this window serially. - jobScheduler.iterateSerial(ExecutionContext.getContextToRecord(), + jobScheduler.iterateSerial(executionContext, chainAppendables(this, stringAndIndexToAppendable("-processWindowOperators", winIdx)), JobScheduler.DEFAULT_CONTEXT_FACTORY, 0, operatorSets.size(), @@ -714,7 +719,7 @@ private void cacheOperatorInputSources( return; } - jobScheduler.iterateParallel(ExecutionContext.getContextToRecord(), + jobScheduler.iterateParallel(executionContext, chainAppendables(this, stringAndIndexToAppendable("-cacheOperatorInputSources", winIdx)), JobScheduler.DEFAULT_CONTEXT_FACTORY, 0, srcIndices.length, (context, idx, nestedErrorConsumer, sourceComplete) -> createCachedColumnSource( @@ -773,7 +778,7 @@ public void close() { } } - jobScheduler.iterateParallel(ExecutionContext.getContextToRecord(), + jobScheduler.iterateParallel(executionContext, chainAppendables(this, stringToAppendable("-createCachedColumnSource")), BatchThreadContext::new, 0, taskCount, (ctx, idx, nec) -> { @@ -839,7 +844,7 @@ public void close() { } } - jobScheduler.iterateParallel(ExecutionContext.getContextToRecord(), + jobScheduler.iterateParallel(executionContext, chainAppendables(this, stringAndIndexToAppendable("-processWindowBucketOperators", winIdx)), OperatorThreadContext::new, 0, dirtyBuckets.length, @@ -927,7 +932,7 @@ private void cleanUpAndNotify(final Runnable onCleanupComplete) { outerNugget.addBaseEntry(accumulated); } } else { - UpdateGraphProcessor.DEFAULT.addNotification(new TerminalNotification() { + 
source.getUpdateGraph().addNotification(new TerminalNotification() { @Override public void run() { synchronized (accumulated) { @@ -1025,7 +1030,7 @@ private void cleanUpAfterError() { } /** - * Disconnect result from the {@link UpdateGraphProcessor}, deliver downstream failure notifications, and cleanup if + * Disconnect result from the {@link PeriodicUpdateGraph}, deliver downstream failure notifications, and cleanup if * needed. * * @param error The {@link Throwable} to deliver, either from upstream or update processing @@ -1089,7 +1094,8 @@ public void onUpdate(@NotNull final TableUpdate upstream) { final QueryTable result = result(); if (result.isFailed()) { Assert.eq(result.getLastNotificationStep(), "result.getLastNotificationStep()", - LogicalClock.DEFAULT.currentStep(), "LogicalClock.DEFAULT.currentStep()"); + getUpdateGraph().clock().currentStep(), + "getUpdateGraph().clock().currentStep()"); return; } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java index 2a7b47d29df..fc1c75d69df 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/BaseArrayBackedMutableTable.java @@ -15,8 +15,6 @@ import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.config.InputTableStatusListener; import io.deephaven.engine.util.config.MutableInputTable; import io.deephaven.engine.table.impl.QueryTable; @@ -50,7 +48,7 @@ abstract class BaseArrayBackedMutableTable extends UpdatableTable { private final Map enumValues; private String 
description = getDefaultDescription(); - private Runnable onPendingChange = UpdateGraphProcessor.DEFAULT::requestRefresh; + private Runnable onPendingChange = updateGraph::requestRefresh; long nextRow = 0; private long pendingProcessed = -1L; @@ -107,7 +105,7 @@ public void modifyRowKey(long key) { }); result.getRowSet().writableCast().insert(builder.build()); result.getRowSet().writableCast().initializePreviousValue(); - UpdateGraphProcessor.DEFAULT.addSource(result); + result.getUpdateGraph().addSource(result); } public BaseArrayBackedMutableTable setDescription(String newDescription) { @@ -123,7 +121,9 @@ public BaseArrayBackedMutableTable setDescription(String newDescription) { */ @TestUseOnly void setOnPendingChange(final Runnable onPendingChange) { - this.onPendingChange = onPendingChange == null ? UpdateGraphProcessor.DEFAULT::requestRefresh : onPendingChange; + this.onPendingChange = onPendingChange == null + ? updateGraph::requestRefresh + : onPendingChange; } private void processPending(RowSetChangeRecorder rowSetChangeRecorder) { @@ -304,7 +304,7 @@ private void asynchronousContinuation( } private void checkBlockingEditSafety() { - if (UpdateGraphProcessor.DEFAULT.isRefreshThread()) { + if (updateGraph.currentThreadProcessesUpdates()) { throw new UnsupportedOperationException("Attempted to make a blocking input table edit from a listener " + "or notification. 
This is unsupported, because it will block the update graph from making " + "progress."); @@ -313,8 +313,8 @@ private void checkBlockingEditSafety() { private void checkAsyncEditSafety(@NotNull final Table changeData) { if (changeData.isRefreshing() - && UpdateGraphProcessor.DEFAULT.isRefreshThread() - && !changeData.satisfied(LogicalClock.DEFAULT.currentStep())) { + && updateGraph.currentThreadProcessesUpdates() + && !changeData.satisfied(updateGraph.clock().currentStep())) { throw new UnsupportedOperationException("Attempted to make an asynchronous input table edit from a " + "listener or notification before the change data table is satisfied on the current cycle. " + "This is unsupported, because it may block the update graph from making progress or produce " @@ -328,7 +328,7 @@ public String getDescription() { } void waitForSequence(long sequence) { - if (UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { + if (updateGraph.exclusiveLock().isHeldByCurrentThread()) { // We're holding the lock. currentTable had better be refreshing. Wait on its UGP condition // in order to allow updates. 
while (processedSequence < sequence) { @@ -355,7 +355,7 @@ public void setRows(@NotNull Table defaultValues, int[] rowArray, Map> columnDefinitions = getTableDefinition().getColumns(); diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ContiguousWritableRowRedirection.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ContiguousWritableRowRedirection.java index 12cba258127..c17eef290a2 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ContiguousWritableRowRedirection.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ContiguousWritableRowRedirection.java @@ -8,6 +8,7 @@ import io.deephaven.base.verify.Require; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableLongChunk; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.engine.updategraph.UpdateCommitter; @@ -147,7 +148,9 @@ public synchronized void startTrackingPrevValues() { Assert.eqNull(updateCommitter, "updateCommitter"); checkpoint = new TLongLongHashMap(Math.min(size, 1024 * 1024), 0.75f, UPDATES_KEY_NOT_FOUND, UPDATES_KEY_NOT_FOUND); - updateCommitter = new UpdateCommitter<>(this, ContiguousWritableRowRedirection::commitUpdates); + updateCommitter = new UpdateCommitter<>(this, + ExecutionContext.getContext().getUpdateGraph(), + ContiguousWritableRowRedirection::commitUpdates); } private synchronized void commitUpdates() { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java index 166f6949cfa..d7edcb0b118 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/DynamicTableWriter.java @@ -7,6 +7,7 @@ import 
io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.Table; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.qst.column.header.ColumnHeader; import io.deephaven.qst.table.TableHeader; import io.deephaven.qst.type.Type; @@ -14,7 +15,6 @@ import io.deephaven.tablelogger.RowSetter; import io.deephaven.tablelogger.TableWriter; import io.deephaven.engine.table.TableDefinition; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.util.QueryConstants; import io.deephaven.engine.table.impl.UpdateSourceQueryTable; import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; @@ -36,7 +36,7 @@ * This class is not thread safe, you must synchronize externally. However, multiple setters may safely log * concurrently. * - * @implNote The constructor publishes {@code this} to the {@link UpdateGraphProcessor} and thus cannot be subclassed. + * @implNote The constructor publishes {@code this} to the {@link UpdateGraph} and thus cannot be subclassed. */ public final class DynamicTableWriter implements TableWriter { private final UpdateSourceQueryTable table; @@ -135,7 +135,7 @@ public DynamicTableWriter(TableDefinition definition, Map consta /** * Gets the table created by this DynamicTableWriter. *

- * The returned table is registered with the UpdateGraphProcessor, and new rows become visible within the run loop. + * The returned table is registered with the PeriodicUpdateGraph, and new rows become visible within the run loop. * * @return a live table with the output of this log */ @@ -192,7 +192,7 @@ public void setFlags(Row.Flags flags) { /** * Writes the current row created with the {@code getSetter} call, and advances the current row by one. *

- * The row will be made visible in the table after the UpdateGraphProcessor run cycle completes. + * The row will be made visible in the table after the PeriodicUpdateGraph run cycle completes. */ @Override public void writeRow() { @@ -413,7 +413,8 @@ private DynamicTableWriter(final Map> sources, final Map (currentRow) -> createRowSetter(source.getType(), (WritableColumnSource) source)); ++ii; } - UpdateGraphProcessor.DEFAULT.addSource(table); + UpdateGraph updateGraph = table.getUpdateGraph(); + updateGraph.addSource(table); } @SuppressWarnings("unchecked") diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java index b70b8039d13..3592d6c5376 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/FunctionGeneratedTableFactory.java @@ -5,10 +5,10 @@ import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.*; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.BaseTable; import io.deephaven.engine.table.impl.sources.*; +import io.deephaven.engine.updategraph.UpdateGraph; import java.util.LinkedHashMap; import java.util.Map; @@ -131,7 +131,7 @@ private void copyTable(Table source) { } /** - * @implNote The constructor publishes {@code this} to the {@link UpdateGraphProcessor} and cannot be subclassed. + * @implNote The constructor publishes {@code this} to the {@link UpdateGraph} and cannot be subclassed. 
*/ private final class FunctionBackedTable extends QueryTable implements Runnable { FunctionBackedTable(TrackingRowSet rowSet, Map> columns) { @@ -139,7 +139,7 @@ private final class FunctionBackedTable extends QueryTable implements Runnable { if (refreshIntervalMs >= 0) { setRefreshing(true); if (refreshIntervalMs > 0) { - UpdateGraphProcessor.DEFAULT.addSource(this); + updateGraph.addSource(this); } } } @@ -184,7 +184,7 @@ private void doRefresh() { public void destroy() { super.destroy(); if (refreshIntervalMs > 0) { - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/HashSetBackedTableFactory.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/HashSetBackedTableFactory.java index 3eb2f162e35..a479f327364 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/HashSetBackedTableFactory.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/HashSetBackedTableFactory.java @@ -4,14 +4,13 @@ package io.deephaven.engine.table.impl.util; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import gnu.trove.iterator.TObjectLongIterator; import gnu.trove.list.array.TLongArrayList; @@ -21,6 +20,7 @@ import gnu.trove.map.hash.TLongLongHashMap; import gnu.trove.map.hash.TLongObjectHashMap; import gnu.trove.map.hash.TObjectLongHashMap; +import io.deephaven.engine.updategraph.UpdateGraph; import 
io.deephaven.tuple.ArrayTuple; import java.util.HashSet; @@ -41,6 +41,9 @@ public class HashSetBackedTableFactory { private final int refreshIntervalMs; private long nextRefresh; private final Map> columns; + + private final UpdateGraph updateGraph; + private final TObjectLongMap valueToIndexMap = new TObjectLongHashMap<>(); private final TLongObjectMap indexToValueMap = new TLongObjectHashMap<>(); @@ -61,6 +64,8 @@ private HashSetBackedTableFactory(Supplier> setGenerator, in for (int ii = 0; ii < colNames.length; ++ii) { columns.put(colNames[ii], new ArrayTupleWrapperColumnSource(ii)); } + + updateGraph = ExecutionContext.getContext().getUpdateGraph(); } /** @@ -120,7 +125,7 @@ private void removeValue(TObjectLongIterator vtiIt, RowSetBuilderRan indexToPreviousMap.put(index, vtiIt.key()); vtiIt.remove(); - indexToPreviousClock.put(index, LogicalClock.DEFAULT.currentStep()); + indexToPreviousClock.put(index, updateGraph.clock().currentStep()); indexToValueMap.remove(index); removedBuilder.addKey(index); @@ -139,21 +144,21 @@ private void addValue(ArrayTuple value, RowSetBuilderRandom addedBuilder) { valueToIndexMap.put(value, newIndex); indexToValueMap.put(newIndex, value); - if (indexToPreviousClock.get(newIndex) != LogicalClock.DEFAULT.currentStep()) { - indexToPreviousClock.put(newIndex, LogicalClock.DEFAULT.currentStep()); + if (indexToPreviousClock.get(newIndex) != updateGraph.clock().currentStep()) { + indexToPreviousClock.put(newIndex, updateGraph.clock().currentStep()); indexToPreviousMap.put(newIndex, null); } } /** - * @implNote The constructor publishes {@code this} to the {@link UpdateGraphProcessor} and cannot be subclassed. + * @implNote The constructor publishes {@code this} to the {@link UpdateGraph} and cannot be subclassed. 
*/ private final class HashSetBackedTable extends QueryTable implements Runnable { HashSetBackedTable(TrackingRowSet rowSet, Map> columns) { super(rowSet, columns); if (refreshIntervalMs >= 0) { setRefreshing(true); - UpdateGraphProcessor.DEFAULT.addSource(this); + updateGraph.addSource(this); } } @@ -186,7 +191,7 @@ public void run() { public void destroy() { super.destroy(); if (refreshIntervalMs >= 0) { - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } } } @@ -214,7 +219,7 @@ public String get(long rowKey) { @Override public String getPrev(long rowKey) { synchronized (HashSetBackedTableFactory.this) { - if (indexToPreviousClock.get(rowKey) == LogicalClock.DEFAULT.currentStep()) { + if (indexToPreviousClock.get(rowKey) == updateGraph.clock().currentStep()) { ArrayTuple row = indexToPreviousMap.get(rowKey); if (row == null) return null; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ImmediateJobScheduler.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ImmediateJobScheduler.java index 4ae7b9a82ed..f5fab51564e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ImmediateJobScheduler.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ImmediateJobScheduler.java @@ -18,6 +18,7 @@ public void submit( final Runnable runnable, final LogOutputAppendable description, final Consumer onError) { + // We do not need to install the update context since we are not changing thread contexts. try (SafeCloseable ignored = executionContext != null ? 
executionContext.open() : null) { runnable.run(); } catch (Exception e) { diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/OperationInitializationPoolJobScheduler.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/OperationInitializationPoolJobScheduler.java index 2bb865c30b0..104184ba3c8 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/OperationInitializationPoolJobScheduler.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/OperationInitializationPoolJobScheduler.java @@ -4,7 +4,6 @@ import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.OperationInitializationThreadPool; import io.deephaven.engine.table.impl.perf.BasePerformanceEntry; -import io.deephaven.engine.table.impl.util.JobScheduler; import io.deephaven.io.log.impl.LogOutputStringImpl; import io.deephaven.util.SafeCloseable; import io.deephaven.util.process.ProcessEnvironment; diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ServerStateTracker.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ServerStateTracker.java index bf5c183f5fd..9a6b53c07ac 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ServerStateTracker.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/ServerStateTracker.java @@ -8,7 +8,7 @@ import io.deephaven.engine.tablelogger.ServerStateLogLogger; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.tablelogger.impl.memory.MemoryTableLogger; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.io.logger.Logger; import io.deephaven.internal.log.LoggerFactory; @@ -38,12 +38,12 @@ public static ServerStateTracker getInstance() { private final Logger logger; private final ServerStateLogLogger processMemLogger; - private final 
UpdateGraphProcessor.AccumulatedCycleStats ugpAccumCycleStats; + private final PeriodicUpdateGraph.AccumulatedCycleStats ugpAccumCycleStats; private ServerStateTracker() { logger = LoggerFactory.getLogger(ServerStateTracker.class); processMemLogger = EngineTableLoggers.get().serverStateLogLogger(); - ugpAccumCycleStats = new UpdateGraphProcessor.AccumulatedCycleStats(); + ugpAccumCycleStats = new PeriodicUpdateGraph.AccumulatedCycleStats(); } private void startThread() { @@ -126,7 +126,8 @@ public void run() { final long prevTotalCollections = memSample.totalCollections; final long prevTotalCollectionTimeMs = memSample.totalCollectionTimeMs; RuntimeMemory.getInstance().read(memSample); - UpdateGraphProcessor.DEFAULT.accumulatedCycleStats.take(ugpAccumCycleStats); + PeriodicUpdateGraph updateGraph = getQueryTable().getUpdateGraph().cast(); + updateGraph.takeAccumulatedCycleStats(ugpAccumCycleStats); final long endTimeMillis = System.currentTimeMillis(); logProcessMem( intervalStartTimeMillis, diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/SyncTableFilter.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/SyncTableFilter.java index 84918d4d72d..6bc5ade7e40 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/SyncTableFilter.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/SyncTableFilter.java @@ -14,10 +14,10 @@ import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.*; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.NotificationQueue; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.engine.table.impl.*; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.WritableLongChunk; import 
io.deephaven.chunk.WritableObjectChunk; @@ -106,8 +106,10 @@ private SyncTableFilter(final List tables) { throw new IllegalArgumentException("No tables specified!"); } + final Table[] engineTables = tables.stream().map(t -> t.table).toArray(Table[]::new); + final UpdateGraph updateGraph = NotificationQueue.Dependency.getUpdateGraph(null, engineTables); if (tables.stream().anyMatch(t -> t.table.isRefreshing())) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + updateGraph.checkInitiateSerialTableOperation(); } // through the builder only @@ -183,7 +185,7 @@ class MergedSyncListener extends io.deephaven.engine.table.impl.MergedListener { @Override protected void process() { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = getUpdateGraph().clock().currentStep(); for (int rr = 0; rr < recorders.size(); ++rr) { final ListenerRecorder recorder = recorders.get(rr); @@ -261,7 +263,7 @@ protected boolean systemicResult() { protected void propagateErrorDownstream( final boolean fromProcess, @NotNull final Throwable error, @Nullable final TableListener.Entry entry) { if (fromProcess) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = getUpdateGraph().clock().currentStep(); final Collection resultsNeedingDelayedNotification = new ArrayList<>(); for (final QueryTable result : results) { if (result.getLastNotificationStep() == currentStep) { @@ -562,7 +564,7 @@ public Builder defaultKeys(final String... keys) { /** * Instantiate the map of synchronized tables. * - * This must be called under the UpdateGraphProcessor lock. + * This must be called under the PeriodicUpdateGraph lock. 
* * @return a map with one entry for each input table */ diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/UpdateGraphProcessorJobScheduler.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/UpdateGraphJobScheduler.java similarity index 85% rename from engine/table/src/main/java/io/deephaven/engine/table/impl/util/UpdateGraphProcessorJobScheduler.java rename to engine/table/src/main/java/io/deephaven/engine/table/impl/util/UpdateGraphJobScheduler.java index bf47cfdc0fa..2d799e0582b 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/UpdateGraphProcessorJobScheduler.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/UpdateGraphJobScheduler.java @@ -5,23 +5,29 @@ import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.impl.perf.BasePerformanceEntry; import io.deephaven.engine.updategraph.AbstractNotification; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.io.log.impl.LogOutputStringImpl; import io.deephaven.util.SafeCloseable; import io.deephaven.util.process.ProcessEnvironment; import java.util.function.Consumer; -public class UpdateGraphProcessorJobScheduler implements JobScheduler { +public class UpdateGraphJobScheduler implements JobScheduler { final BasePerformanceEntry accumulatedBaseEntry = new BasePerformanceEntry(); + private final UpdateGraph updateGraph; + + public UpdateGraphJobScheduler(final UpdateGraph updateGraph) { + this.updateGraph = updateGraph; + } + @Override public void submit( final ExecutionContext executionContext, final Runnable runnable, final LogOutputAppendable description, final Consumer onError) { - UpdateGraphProcessor.DEFAULT.addNotification(new AbstractNotification(false) { + updateGraph.addNotification(new AbstractNotification(false) { @Override public boolean canExecute(long step) { return true; @@ -62,6 +68,6 @@ public 
BasePerformanceEntry getAccumulatedPerformance() { @Override public int threadCount() { - return UpdateGraphProcessor.DEFAULT.getUpdateThreads(); + return updateGraph.parallelismFactor(); } } diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableRowRedirectionLockFree.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableRowRedirectionLockFree.java index 3df2a9000f0..903f5a2f035 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableRowRedirectionLockFree.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableRowRedirectionLockFree.java @@ -6,6 +6,7 @@ import gnu.trove.iterator.TLongLongIterator; import io.deephaven.base.verify.Assert; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.updategraph.UpdateCommitter; @@ -242,7 +243,8 @@ public void startTrackingPrevValues() { Assert.eqNull(updateCommitter, "updateCommitter"); Assert.eq(baseline, "baseline", updates, "updates"); updates = createUpdateMap(); - updateCommitter = new UpdateCommitter<>(this, WritableRowRedirectionLockFree::commitUpdates); + updateCommitter = new UpdateCommitter<>(this, ExecutionContext.getContext().getUpdateGraph(), + WritableRowRedirectionLockFree::commitUpdates); } /** diff --git a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableSingleValueRowRedirection.java b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableSingleValueRowRedirection.java index 36daa3f9593..4f97d1fd221 100644 --- a/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableSingleValueRowRedirection.java +++ b/engine/table/src/main/java/io/deephaven/engine/table/impl/util/WritableSingleValueRowRedirection.java @@ -4,9 +4,10 @@ package io.deephaven.engine.table.impl.util; import 
io.deephaven.chunk.WritableChunk; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.updategraph.UpdateGraph; import org.jetbrains.annotations.NotNull; public class WritableSingleValueRowRedirection extends SingleValueRowRedirection { @@ -14,8 +15,11 @@ public class WritableSingleValueRowRedirection extends SingleValueRowRedirection private long prevValue; private long updatedClockTick = 0; + private final UpdateGraph updateGraph; + public WritableSingleValueRowRedirection(final long value) { super(value); + updateGraph = ExecutionContext.getContext().getUpdateGraph(); } @Override @@ -25,14 +29,16 @@ public synchronized long get(long outerRowKey) { @Override public synchronized long getPrev(long outerRowKey) { - if (updatedClockTick > 0 && updatedClockTick == LogicalClock.DEFAULT.currentStep()) { - return prevValue; + if (updatedClockTick > 0) { + if (updatedClockTick == updateGraph.clock().currentStep()) { + return prevValue; + } } return value; } public synchronized void setValue(long newValue) { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); if (updatedClockTick > 0 && updatedClockTick != currentStep) { prevValue = value; updatedClockTick = currentStep; @@ -47,7 +53,7 @@ public long getValue() { public void startTrackingPrevValues() { prevValue = value; - updatedClockTick = LogicalClock.DEFAULT.currentStep(); + updatedClockTick = updateGraph.clock().currentStep(); } @Override @@ -60,8 +66,9 @@ public void fillPrevChunk( @NotNull FillContext fillContext, @NotNull WritableChunk innerRowKeys, @NotNull RowSequence outerRowKeys) { - final long fillValue = - (updatedClockTick > 0 && updatedClockTick == LogicalClock.DEFAULT.currentStep()) ? 
prevValue : value; + final long fillValue = (updatedClockTick > 0 && updatedClockTick == updateGraph.clock().currentStep()) + ? prevValue + : value; final int sz = outerRowKeys.intSize(); innerRowKeys.setSize(sz); innerRowKeys.asWritableLongChunk().fillWithValue(0, sz, fillValue); diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphProcessor.java b/engine/table/src/main/java/io/deephaven/engine/updategraph/impl/PeriodicUpdateGraph.java similarity index 80% rename from engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphProcessor.java rename to engine/table/src/main/java/io/deephaven/engine/updategraph/impl/PeriodicUpdateGraph.java index 0aaba5c8f62..68e2a7e6dd3 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphProcessor.java +++ b/engine/table/src/main/java/io/deephaven/engine/updategraph/impl/PeriodicUpdateGraph.java @@ -1,20 +1,24 @@ /** * Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending */ -package io.deephaven.engine.updategraph; +package io.deephaven.engine.updategraph.impl; import io.deephaven.UncheckedDeephavenException; import io.deephaven.base.SleepUtil; import io.deephaven.base.log.LogOutput; import io.deephaven.base.reference.SimpleReference; import io.deephaven.base.verify.Assert; -import io.deephaven.configuration.Configuration; import io.deephaven.chunk.util.pools.MultiChunkPool; +import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessManager; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; +import io.deephaven.engine.updategraph.*; import io.deephaven.engine.util.reference.CleanupReferenceProcessorInstance; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; +import io.deephaven.hash.KeyedObjectHashMap; +import io.deephaven.hash.KeyedObjectKey; import 
io.deephaven.hotspot.JvmIntrospectionContext; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.log.LogEntry; @@ -41,10 +45,8 @@ import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.locks.Condition; import java.util.function.BooleanSupplier; import java.util.function.LongConsumer; -import java.util.function.Supplier; /** *

@@ -58,13 +60,23 @@ * defined) * */ -public enum UpdateGraphProcessor implements UpdateSourceRegistrar, NotificationQueue, NotificationQueue.Dependency { - DEFAULT; +public class PeriodicUpdateGraph implements UpdateGraph { + + public static final String DEFAULT_UPDATE_GRAPH_NAME = "DEFAULT"; + public static final int NUM_THREADS_DEFAULT_UPDATE_GRAPH = + Configuration.getInstance().getIntegerWithDefault("PeriodicUpdateGraph.updateThreads", -1); - private final Logger log = LoggerFactory.getLogger(UpdateGraphProcessor.class); + private static final KeyedObjectHashMap INSTANCES = new KeyedObjectHashMap<>( + new KeyedObjectKey.BasicAdapter<>(PeriodicUpdateGraph::getName)); + + public static Builder newBuilder(final String name) { + return new Builder(name); + } + + private final Logger log = LoggerFactory.getLogger(PeriodicUpdateGraph.class); /** - * Update sources that are part of this UpdateGraphProcessor. + * Update sources that are part of this PeriodicUpdateGraph. */ private final SimpleReferenceManager sources = new SimpleReferenceManager<>(UpdateSourceRefreshNotification::new); @@ -92,11 +104,12 @@ public enum UpdateGraphProcessor implements UpdateSourceRegistrar, NotificationQ private final AtomicBoolean refreshRequested = new AtomicBoolean(); private final Thread refreshThread; + private volatile boolean running = true; /** * If this is set to a positive value, then we will call the {@link #watchDogTimeoutProcedure} if any single run * loop takes longer than this value. The intention is to use this for strategies, or other queries, where a - * UpdateGraphProcessor loop that is "stuck" is the equivalent of an error. Set the value with + * PeriodicUpdateGraph loop that is "stuck" is the equivalent of an error. Set the value with * {@link #setWatchDogMillis(int)}. 
*/ private int watchDogMillis = 0; @@ -107,23 +120,20 @@ public enum UpdateGraphProcessor implements UpdateSourceRegistrar, NotificationQ */ private LongConsumer watchDogTimeoutProcedure = null; - public static final String ALLOW_UNIT_TEST_MODE_PROP = "UpdateGraphProcessor.allowUnitTestMode"; - private final boolean ALLOW_UNIT_TEST_MODE = - Configuration.getInstance().getBooleanWithDefault(ALLOW_UNIT_TEST_MODE_PROP, false); + public static final String ALLOW_UNIT_TEST_MODE_PROP = "PeriodicUpdateGraph.allowUnitTestMode"; + private final boolean ALLOW_UNIT_TEST_MODE; private int notificationAdditionDelay = 0; private Random notificationRandomizer = new Random(0); private boolean unitTestMode = false; private ExecutorService unitTestRefreshThreadPool; - private static final String DEFAULT_TARGET_CYCLE_DURATION_MILLIS_PROP = - "UpdateGraphProcessor.targetCycleDurationMillis"; - private static final String MINIMUM_CYCLE_DURATION_TO_LOG_MILLIS_PROP = - "UpdateGraphProcessor.minimumCycleDurationToLogMillis"; - private final long DEFAULT_TARGET_CYCLE_DURATION_MILLIS = - Configuration.getInstance().getIntegerWithDefault(DEFAULT_TARGET_CYCLE_DURATION_MILLIS_PROP, 1000); - private volatile long targetCycleDurationMillis = DEFAULT_TARGET_CYCLE_DURATION_MILLIS; - private final long minimumCycleDurationToLogNanos = TimeUnit.MILLISECONDS.toNanos( - Configuration.getInstance().getIntegerWithDefault(MINIMUM_CYCLE_DURATION_TO_LOG_MILLIS_PROP, 25)); + public static final String DEFAULT_TARGET_CYCLE_DURATION_MILLIS_PROP = + "PeriodicUpdateGraph.targetCycleDurationMillis"; + public static final String MINIMUM_CYCLE_DURATION_TO_LOG_MILLIS_PROP = + "PeriodicUpdateGraph.minimumCycleDurationToLogMillis"; + private final long DEFAULT_TARGET_CYCLE_DURATION_MILLIS; + private volatile long targetCycleDurationMillis; + private final long minimumCycleDurationToLogNanos; /** * How many cycles we have not logged, but were non-zero. 
@@ -133,7 +143,7 @@ public enum UpdateGraphProcessor implements UpdateSourceRegistrar, NotificationQ private long suppressedCyclesTotalSafePointTimeMillis = 0; /** - * Accumulated UGP exclusive lock waits for the current cycle (or previous, if idle). + * Accumulated UpdateGraph exclusive lock waits for the current cycle (or previous, if idle). */ private long currentCycleLockWaitTotalNanos = 0; /** @@ -226,7 +236,7 @@ public synchronized void take(final AccumulatedCycleStats out) { /** * The number of threads in our executor service for dispatching notifications. If 1, then we don't actually use the - * executor service; but instead dispatch all the notifications on the UpdateGraphProcessor run thread. + * executor service; but instead dispatch all the notifications on the PeriodicUpdateGraph run thread. */ private final int updateThreads; @@ -234,56 +244,75 @@ public synchronized void take(final AccumulatedCycleStats out) { * Is this one of the threads engaged in notification processing? 
(Either the solitary run thread, or one of the * pooled threads it uses in some configurations) */ - private final ThreadLocal isRefreshThread = ThreadLocal.withInitial(() -> false); + private final ThreadLocal isUpdateThread = ThreadLocal.withInitial(() -> false); - private final boolean CHECK_TABLE_OPERATIONS = - Configuration.getInstance().getBooleanWithDefault("UpdateGraphProcessor.checkTableOperations", false); - private final ThreadLocal checkTableOperations = ThreadLocal.withInitial(() -> CHECK_TABLE_OPERATIONS); + private final ThreadLocal serialTableOperationsSafe = ThreadLocal.withInitial(() -> false); private final long minimumInterCycleSleep = - Configuration.getInstance().getIntegerWithDefault("UpdateGraphProcessor.minimumInterCycleSleep", 0); + Configuration.getInstance().getIntegerWithDefault("PeriodicUpdateGraph.minimumInterCycleSleep", 0); private final boolean interCycleYield = - Configuration.getInstance().getBooleanWithDefault("UpdateGraphProcessor.interCycleYield", false); + Configuration.getInstance().getBooleanWithDefault("PeriodicUpdateGraph.interCycleYield", false); + + private final LogicalClockImpl logicalClock = new LogicalClockImpl(); /** * Encapsulates locking support. */ - private final UpdateGraphLock lock = UpdateGraphLock.create(LogicalClock.DEFAULT, ALLOW_UNIT_TEST_MODE); + private final UpdateGraphLock lock; /** - * When UpdateGraphProcessor.printDependencyInformation is set to true, the UpdateGraphProcessor will print debug + * When PeriodicUpdateGraph.printDependencyInformation is set to true, the PeriodicUpdateGraph will print debug * information for each notification that has dependency information; as well as which notifications have been * completed and are outstanding. 
*/ private final boolean printDependencyInformation = - Configuration.getInstance().getBooleanWithDefault("UpdateGraphProcessor.printDependencyInformation", false); + Configuration.getInstance().getBooleanWithDefault("PeriodicUpdateGraph.printDependencyInformation", false); + + private final String name; + + public PeriodicUpdateGraph( + final String name, + final boolean allowUnitTestMode, + final long targetCycleDurationMillis, + final long minimumCycleDurationToLogNanos, + final int numUpdateThreads) { + this.name = name; + this.ALLOW_UNIT_TEST_MODE = allowUnitTestMode; + this.DEFAULT_TARGET_CYCLE_DURATION_MILLIS = targetCycleDurationMillis; + this.targetCycleDurationMillis = targetCycleDurationMillis; + this.minimumCycleDurationToLogNanos = minimumCycleDurationToLogNanos; + this.lock = UpdateGraphLock.create(this, ALLOW_UNIT_TEST_MODE); - UpdateGraphProcessor() { notificationProcessor = makeNotificationProcessor(); jvmIntrospectionContext = new JvmIntrospectionContext(); refreshThread = new Thread(ThreadInitializationFactory.wrapRunnable(() -> { configureRefreshThread(); - // noinspection InfiniteLoopStatement - while (true) { + while (running) { Assert.eqFalse(ALLOW_UNIT_TEST_MODE, "ALLOW_UNIT_TEST_MODE"); refreshTablesAndFlushNotifications(); } - }), "UpdateGraphProcessor." + name() + ".refreshThread"); + }), "PeriodicUpdateGraph." 
+ name + ".refreshThread"); refreshThread.setDaemon(true); - final int updateThreads = - Configuration.getInstance().getIntegerWithDefault("UpdateGraphProcessor.updateThreads", -1); - if (updateThreads <= 0) { + if (numUpdateThreads <= 0) { this.updateThreads = Runtime.getRuntime().availableProcessors(); } else { - this.updateThreads = updateThreads; + this.updateThreads = numUpdateThreads; } } + public String getName() { + return name; + } + + public UpdateGraph getUpdateGraph() { + return this; + } + @Override public LogOutput append(@NotNull final LogOutput logOutput) { - return logOutput.append("UpdateGraphProcessor-").append(name()); + return logOutput.append("PeriodicUpdateGraph-").append(name); } @Override @@ -291,11 +320,16 @@ public String toString() { return new LogOutputStringImpl().append(this).toString(); } + @Override + public LogicalClock clock() { + return logicalClock; + } + @NotNull private NotificationProcessor makeNotificationProcessor() { if (updateThreads > 1) { - final ThreadFactory threadFactory = new UpdateGraphProcessorThreadFactory( - new ThreadGroup("UpdateGraphProcessor-updateExecutors"), "updateExecutor"); + final ThreadFactory threadFactory = new NotificationProcessorThreadFactory( + new ThreadGroup("PeriodicUpdateGraph-updateExecutors"), "updateExecutor"); return new ConcurrentNotificationProcessor(threadFactory, updateThreads); } else { return new QueueNotificationProcessor(); @@ -305,8 +339,8 @@ private NotificationProcessor makeNotificationProcessor() { @TestUseOnly private NotificationProcessor makeRandomizedNotificationProcessor(final Random random, final int nThreads, final int notificationStartDelay) { - final UpdateGraphProcessorThreadFactory threadFactory = new UpdateGraphProcessorThreadFactory( - new ThreadGroup("UpdateGraphProcessor-randomizedUpdatedExecutors"), "randomizedUpdateExecutor"); + final ThreadFactory threadFactory = new NotificationProcessorThreadFactory( + new 
ThreadGroup("PeriodicUpdateGraph-randomizedUpdatedExecutors"), "randomizedUpdateExecutor"); return new ConcurrentNotificationProcessor(threadFactory, nThreads) { private Notification addRandomDelay(@NotNull final Notification notification) { @@ -349,14 +383,14 @@ public void submitAll(@NotNull IntrusiveDoublyLinkedQueue notifica * Retrieve the number of update threads. * *

- * The UpdateGraphProcessor has a configurable number of update processing threads. The number of threads is exposed + * The PeriodicUpdateGraph has a configurable number of update processing threads. The number of threads is exposed * in your method to enable you to partition a query based on the number of threads. *

* * @return the number of update threads configured. */ - @SuppressWarnings("unused") - public int getUpdateThreads() { + @Override + public int parallelismFactor() { if (notificationProcessor == null) { return updateThreads; } else if (notificationProcessor instanceof ConcurrentNotificationProcessor) { @@ -370,7 +404,7 @@ public int getUpdateThreads() { /** *

- * Get the shared lock for this {@link UpdateGraphProcessor}. + * Get the shared lock for this {@link PeriodicUpdateGraph}. *

* Using this lock will prevent run processing from proceeding concurrently, but will allow other read-only * processing to proceed. @@ -380,7 +414,7 @@ public int getUpdateThreads() { * This lock does not support {@link java.util.concurrent.locks.Lock#newCondition()}. Use the exclusive * lock if you need to wait on events that are driven by run processing. * - * @return The shared lock for this {@link UpdateGraphProcessor} + * @return The shared lock for this {@link PeriodicUpdateGraph} */ public AwareFunctionalLock sharedLock() { return lock.sharedLock(); @@ -388,7 +422,7 @@ public AwareFunctionalLock sharedLock() { /** *

- * Get the exclusive lock for this {@link UpdateGraphProcessor}. + * Get the exclusive lock for this {@link PeriodicUpdateGraph}. *

* Using this lock will prevent run or read-only processing from proceeding concurrently. *

@@ -399,7 +433,7 @@ public AwareFunctionalLock sharedLock() { *

* This lock does support {@link java.util.concurrent.locks.Lock#newCondition()}. * - * @return The exclusive lock for this {@link UpdateGraphProcessor} + * @return The exclusive lock for this {@link PeriodicUpdateGraph} */ public AwareFunctionalLock exclusiveLock() { return lock.exclusiveLock(); @@ -412,103 +446,28 @@ public AwareFunctionalLock exclusiveLock() { * * @return whether this is one of our run threads. */ - public boolean isRefreshThread() { - return isRefreshThread.get(); + @Override + public boolean currentThreadProcessesUpdates() { + return isUpdateThread.get(); } - /** - *

- * If we are establishing a new table operation, on a refreshing table without the UpdateGraphProcessor lock; then - * we are likely committing a grievous error, but one that will only occasionally result in us getting the wrong - * answer or if we are lucky an assertion. This method is called from various query operations that should not be - * established without the UGP lock. - *

- * - *

- * The run thread pool threads are allowed to instantiate operations, even though that thread does not have the - * lock; because they are protected by the main run thread and dependency tracking. - *

- * - *

- * If you are sure that you know what you are doing better than the query engine, you may call - * {@link #setCheckTableOperations(boolean)} to set a thread local variable bypassing this check. - *

- */ - public void checkInitiateTableOperation() { - if (!getCheckTableOperations() || exclusiveLock().isHeldByCurrentThread() - || sharedLock().isHeldByCurrentThread() || isRefreshThread()) { - return; - } - throw new IllegalStateException( - "May not initiate table operations: UGP exclusiveLockHeld=" + exclusiveLock().isHeldByCurrentThread() - + ", sharedLockHeld=" + sharedLock().isHeldByCurrentThread() - + ", refreshThread=" + isRefreshThread()); + @Override + public boolean serialTableOperationsSafe() { + return serialTableOperationsSafe.get(); } - /** - * If you know that the table operations you are performing are indeed safe, then call this method with false to - * disable table operation checking. Conversely, if you want to enforce checking even if the configuration - * disagrees; call it with true. - * - * @param value the new value of check table operations - * @return the old value of check table operations - */ - @SuppressWarnings("unused") - public boolean setCheckTableOperations(boolean value) { - final boolean old = checkTableOperations.get(); - checkTableOperations.set(value); + @Override + public boolean setSerialTableOperationsSafe(final boolean newValue) { + final boolean old = serialTableOperationsSafe.get(); + serialTableOperationsSafe.set(newValue); return old; } - /** - * Execute the supplied code while table operations are unchecked. - * - * @param supplier the function to run - * @return the result of supplier - */ - @SuppressWarnings("unused") - public T doUnchecked(Supplier supplier) { - final boolean old = getCheckTableOperations(); - try { - setCheckTableOperations(false); - return supplier.get(); - } finally { - setCheckTableOperations(old); - } - } - - /** - * Execute the supplied code while table operations are unchecked. 
- * - * @param runnable the function to run - */ - @SuppressWarnings("unused") - public void doUnchecked(Runnable runnable) { - final boolean old = getCheckTableOperations(); - try { - setCheckTableOperations(false); - runnable.run(); - } finally { - setCheckTableOperations(old); - } - } - - /** - * Should this thread check table operations for safety with respect to the update lock? - * - * @return if we should check table operations. - */ - public boolean getCheckTableOperations() { - return checkTableOperations.get(); - } - - /** - *

* Set the target duration of an update cycle, including the updating phase and the idle phase. This is also the * target interval between the start of one cycle and the start of the next. *

- * Can be reset to default via {@link #resetCycleDuration()}. + * Can be reset to default via {@link #resetTargetCycleDuration()}. * * @implNote Any target cycle duration {@code < 0} will be clamped to 0. * @@ -524,20 +483,18 @@ public void setTargetCycleDurationMillis(final long targetCycleDurationMillis) { * * @return The {@link #setTargetCycleDurationMillis(long) current} target cycle duration */ - @SuppressWarnings("unused") public long getTargetCycleDurationMillis() { return targetCycleDurationMillis; } /** - * Resets the run cycle time to the default target configured via the - * {@value #DEFAULT_TARGET_CYCLE_DURATION_MILLIS_PROP} property. + * Resets the run cycle time to the default target configured via the {@link Builder} setting. * - * @implNote If the {@value #DEFAULT_TARGET_CYCLE_DURATION_MILLIS_PROP} property is not set, this value defaults to - * 1000ms. + * @implNote If the {@link Builder#targetCycleDurationMillis(long)} property is not set, this value defaults to + * {@link Builder#DEFAULT_TARGET_CYCLE_DURATION_MILLIS_PROP} which defaults to 1000ms. 
*/ @SuppressWarnings("unused") - public void resetCycleDuration() { + public void resetTargetCycleDuration() { targetCycleDurationMillis = DEFAULT_TARGET_CYCLE_DURATION_MILLIS; } @@ -556,10 +513,10 @@ public void enableUnitTestMode() { return; } if (!ALLOW_UNIT_TEST_MODE) { - throw new IllegalStateException("UpdateGraphProcessor.allowUnitTestMode=false"); + throw new IllegalStateException("PeriodicUpdateGraph.allowUnitTestMode=false"); } if (refreshThread.isAlive()) { - throw new IllegalStateException("UpdateGraphProcessor.refreshThread is executing!"); + throw new IllegalStateException("PeriodicUpdateGraph.refreshThread is executing!"); } lock.reset(); unitTestMode = true; @@ -602,36 +559,6 @@ public void setWatchDogTimeoutProcedure(LongConsumer procedure) { this.watchDogTimeoutProcedure = procedure; } - public void requestSignal(Condition updateGraphProcessorCondition) { - if (UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { - updateGraphProcessorCondition.signalAll(); - } else { - // terminal notifications always run on the UGP thread - final Notification terminalNotification = new TerminalNotification() { - @Override - public void run() { - Assert.assertion(UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread(), - "UpdateGraphProcessor.DEFAULT.isHeldByCurrentThread()"); - updateGraphProcessorCondition.signalAll(); - } - - @Override - public boolean mustExecuteWithUgpLock() { - return true; - } - - @Override - public LogOutput append(LogOutput output) { - return output.append("SignalNotification(") - .append(System.identityHashCode(updateGraphProcessorCondition)).append(")"); - } - }; - synchronized (terminalNotifications) { - terminalNotifications.offer(terminalNotification); - } - } - } - private class WatchdogJob extends TimedJob { @Override public void timedOut() { @@ -647,17 +574,27 @@ public void timedOut() { * @implNote Must not be in {@link #enableUnitTestMode() unit test} mode. 
*/ public void start() { + Assert.eqTrue(running, "running"); Assert.eqFalse(unitTestMode, "unitTestMode"); Assert.eqFalse(ALLOW_UNIT_TEST_MODE, "ALLOW_UNIT_TEST_MODE"); synchronized (refreshThread) { if (!refreshThread.isAlive()) { - log.info().append("UpdateGraphProcessor starting with ").append(updateThreads) + log.info().append("PeriodicUpdateGraph starting with ").append(updateThreads) .append(" notification processing threads").endl(); refreshThread.start(); } } } + /** + * Begins the process to stop all processing threads and forces ReferenceCounted sources to a reference count of + * zero. + */ + public void stop() { + running = false; + notificationProcessor.shutdown(); + } + /** * Add a table to the list of tables to run and mark it as {@link DynamicNode#setRefreshing(boolean) refreshing} if * it was a {@link DynamicNode}. @@ -668,12 +605,16 @@ public void start() { */ @Override public void addSource(@NotNull final Runnable updateSource) { + if (!running) { + throw new IllegalStateException("PeriodicUpdateGraph is no longer running"); + } + if (updateSource instanceof DynamicNode) { ((DynamicNode) updateSource).setRefreshing(true); } if (!ALLOW_UNIT_TEST_MODE) { - // if we are in unit test mode we never want to start the UGP + // if we are in unit test mode we never want to start the UpdateGraph sources.add(updateSource); start(); } @@ -725,7 +666,7 @@ public void addNotification(@NotNull final Notification notification) { logDependencies().append(Thread.currentThread().getName()).append(": Adding notification ") .append(notification).endl(); synchronized (pendingNormalNotifications) { - Assert.eq(LogicalClock.DEFAULT.currentState(), "LogicalClock.DEFAULT.currentState()", + Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", LogicalClock.State.Updating, "LogicalClock.State.Updating"); pendingNormalNotifications.offer(notification); } @@ -747,7 +688,7 @@ public boolean maybeAddNotification(@NotNull final Notification notification, fi 
synchronized (pendingNormalNotifications) { // Note that the clock is advanced to idle under the pendingNormalNotifications lock, after which point no // further normal notifications will be processed on this cycle. - final long logicalClockValue = LogicalClock.DEFAULT.currentValue(); + final long logicalClockValue = logicalClock.currentValue(); if (LogicalClock.getState(logicalClockValue) == LogicalClock.State.Updating && LogicalClock.getStep(logicalClockValue) == deliveryStep) { pendingNormalNotifications.offer(notification); @@ -774,6 +715,7 @@ public boolean satisfied(final long step) { * * @see #addNotification(Notification) */ + @Override public void addNotifications(@NotNull final Collection notifications) { synchronized (pendingNormalNotifications) { synchronized (terminalNotifications) { @@ -794,6 +736,14 @@ public void requestRefresh() { } } + /** + * @return Whether this UpdateGraph has a mechanism that supports refreshing + */ + @Override + public boolean supportsRefreshing() { + return true; + } + /** * Clear all monitored tables and enqueued notifications to support {@link #enableUnitTestMode() unit-tests}. 
* @@ -828,7 +778,7 @@ public void resetForUnitTests(boolean after, synchronized (pendingNormalNotifications) { pendingNormalNotifications.clear(); } - isRefreshThread.remove(); + isUpdateThread.remove(); if (randomizedNotifications) { notificationProcessor = makeRandomizedNotificationProcessor(notificationRandomizer, maxRandomizedThreadCount, notificationStartDelay); @@ -838,8 +788,8 @@ public void resetForUnitTests(boolean after, synchronized (terminalNotifications) { terminalNotifications.clear(); } - LogicalClock.DEFAULT.resetForUnitTests(); - sourcesLastSatisfiedStep = LogicalClock.DEFAULT.currentStep(); + logicalClock.resetForUnitTests(); + sourcesLastSatisfiedStep = logicalClock.currentStep(); refreshScope = null; if (after) { @@ -853,13 +803,13 @@ public void resetForUnitTests(boolean after, ensureUnlocked("unit test reset thread", errors); if (refreshThread.isAlive()) { - errors.add("UGP refreshThread isAlive"); + errors.add("UpdateGraph refreshThread isAlive"); } try { unitTestRefreshThreadPool.submit(() -> ensureUnlocked("unit test run pool thread", errors)).get(); } catch (InterruptedException | ExecutionException e) { - errors.add("Failed to ensure UGP unlocked from unit test run thread pool: " + e); + errors.add("Failed to ensure UpdateGraph unlocked from unit test run thread pool: " + e); } unitTestRefreshThreadPool.shutdownNow(); try { @@ -872,7 +822,8 @@ public void resetForUnitTests(boolean after, unitTestRefreshThreadPool = makeUnitTestRefreshExecutor(); if (!errors.isEmpty()) { - final String message = "UGP reset for unit tests reported errors:\n\t" + String.join("\n\t", errors); + final String message = + "UpdateGraph reset for unit tests reported errors:\n\t" + String.join("\n\t", errors); System.err.println(message); if (after) { throw new IllegalStateException(message); @@ -883,8 +834,8 @@ public void resetForUnitTests(boolean after, } /** - * Begin the next {@link LogicalClock#startUpdateCycle() update cycle} while in {@link 
#enableUnitTestMode() - * unit-test} mode. Note that this happens on a simulated UGP run thread, rather than this thread. + * Begin the next {@link LogicalClockImpl#startUpdateCycle() update cycle} while in {@link #enableUnitTestMode() + * unit-test} mode. Note that this happens on a simulated UpdateGraph run thread, rather than this thread. */ @TestUseOnly public void startCycleForUnitTests() { @@ -899,21 +850,21 @@ public void startCycleForUnitTests() { @TestUseOnly private void startCycleForUnitTestsInternal() { // noinspection AutoBoxing - isRefreshThread.set(true); - UpdateGraphProcessor.DEFAULT.exclusiveLock().lock(); + isUpdateThread.set(true); + exclusiveLock().lock(); Assert.eqNull(refreshScope, "refreshScope"); refreshScope = new LivenessScope(); LivenessScopeStack.push(refreshScope); - LogicalClock.DEFAULT.startUpdateCycle(); - sourcesLastSatisfiedStep = LogicalClock.DEFAULT.currentStep(); + logicalClock.startUpdateCycle(); + sourcesLastSatisfiedStep = logicalClock.currentStep(); } /** * Do the second half of the update cycle, including flushing notifications, and completing the - * {@link LogicalClock#completeUpdateCycle() LogicalClock} update cycle. Note that this happens on a simulated UGP - * run thread, rather than this thread. + * {@link LogicalClockImpl#completeUpdateCycle() LogicalClock} update cycle. Note that this happens on a simulated + * UpdateGraph run thread, rather than this thread. */ @TestUseOnly public void completeCycleForUnitTests() { @@ -934,8 +885,8 @@ private void completeCycleForUnitTestsInternal() { refreshScope = null; } - UpdateGraphProcessor.DEFAULT.exclusiveLock().unlock(); - isRefreshThread.remove(); + exclusiveLock().unlock(); + isUpdateThread.remove(); }) { flushNotificationsAndCompleteCycle(); } @@ -959,7 +910,7 @@ public void runWithinUnitTestCycle(ThrowingRunnable run } /** - * Refresh an update source on a simulated UGP run thread, rather than this thread. 
+ * Refresh an update source on a simulated UpdateGraph run thread, rather than this thread. * * @param updateSource The update source to run */ @@ -974,8 +925,8 @@ public void refreshUpdateSourceForUnitTests(@NotNull final Runnable updateSource } /** - * Flush a single notification from the UGP queue. Note that this happens on a simulated UGP run thread, rather than - * this thread. + * Flush a single notification from the UpdateGraph queue. Note that this happens on a simulated UpdateGraph run + * thread, rather than this thread. * * @return whether a notification was found in the queue */ @@ -1009,9 +960,10 @@ public boolean flushOneNotificationForUnitTestsInternal() { final Notification notification = it.next(); Assert.eqFalse(notification.isTerminal(), "notification.isTerminal()"); - Assert.eqFalse(notification.mustExecuteWithUgpLock(), "notification.mustExecuteWithUgpLock()"); + Assert.eqFalse(notification.mustExecuteWithUpdateGraphLock(), + "notification.mustExecuteWithUpdateGraphLock()"); - if (notification.canExecute(LogicalClock.DEFAULT.currentStep())) { + if (notification.canExecute(logicalClock.currentStep())) { satisfied = notification; it.remove(); break; @@ -1031,8 +983,8 @@ public boolean flushOneNotificationForUnitTestsInternal() { } /** - * Flush all the normal notifications from the UGP queue. Note that the flushing happens on a simulated UGP run - * thread, rather than this thread. + * Flush all the normal notifications from the UpdateGraph queue. Note that the flushing happens on a simulated + * UpdateGraph run thread, rather than this thread. */ @TestUseOnly public void flushAllNormalNotificationsForUnitTests() { @@ -1040,8 +992,8 @@ public void flushAllNormalNotificationsForUnitTests() { } /** - * Flush all the normal notifications from the UGP queue, continuing until {@code done} returns {@code true}. Note - * that the flushing happens on a simulated UGP run thread, rather than this thread. 
+ * Flush all the normal notifications from the UpdateGraph queue, continuing until {@code done} returns + * {@code true}. Note that the flushing happens on a simulated UpdateGraph run thread, rather than this thread. * * @param done Function to determine when we can stop waiting for new notifications * @return A Runnable that may be used to wait for the concurrent flush job to complete @@ -1100,11 +1052,11 @@ private void flushNotificationsAndCompleteCycle() { // satisfaction are delivered first to the pendingNormalNotifications queue, and hence will not be processed // until we advance to the flush* methods. // TODO: If and when we properly integrate update sources into the dependency tracking system, we can - // discontinue this distinct phase, along with the requirement to treat the UGP itself as a Dependency. + // discontinue this distinct phase, along with the requirement to treat the UpdateGraph itself as a Dependency. // Until then, we must delay the beginning of "normal" notification processing until all update sources are // done. See IDS-8039. notificationProcessor.doAllWork(); - sourcesLastSatisfiedStep = LogicalClock.DEFAULT.currentStep(); + sourcesLastSatisfiedStep = logicalClock.currentStep(); flushNormalNotificationsAndCompleteCycle(); flushTerminalNotifications(); @@ -1128,7 +1080,7 @@ private void flushNormalNotificationsAndCompleteCycle() { // We complete the cycle here before releasing the lock on pendingNotifications, so that // maybeAddNotification can detect scenarios where the notification cannot be delivered on the // desired step. 
- LogicalClock.DEFAULT.completeUpdateCycle(); + logicalClock.completeUpdateCycle(); break; } } @@ -1142,7 +1094,8 @@ private void flushNormalNotificationsAndCompleteCycle() { final Notification notification = it.next(); Assert.eqFalse(notification.isTerminal(), "notification.isTerminal()"); - Assert.eqFalse(notification.mustExecuteWithUgpLock(), "notification.mustExecuteWithUgpLock()"); + Assert.eqFalse(notification.mustExecuteWithUpdateGraphLock(), + "notification.mustExecuteWithUpdateGraphLock()"); final boolean satisfied = notification.canExecute(sourcesLastSatisfiedStep); if (satisfied) { @@ -1182,7 +1135,7 @@ private void flushTerminalNotifications() { final Notification notification = it.next(); Assert.assertion(notification.isTerminal(), "notification.isTerminal()"); - if (!notification.mustExecuteWithUgpLock()) { + if (!notification.mustExecuteWithUpdateGraphLock()) { it.remove(); // for the single threaded queue case; this enqueues the notification; // for the executor service case, this causes the notification to be kicked off @@ -1287,10 +1240,10 @@ private void runNotification(@NotNull final Notification notification) { .endl(); } catch (final Exception e) { log.error().append(Thread.currentThread().getName()) - .append(": Exception while executing UpdateGraphProcessor notification: ").append(notification) + .append(": Exception while executing PeriodicUpdateGraph notification: ").append(notification) .append(": ").append(e).endl(); ProcessEnvironment.getGlobalFatalErrorReporter() - .report("Exception while processing UpdateGraphProcessor notification", e); + .report("Exception while processing PeriodicUpdateGraph notification", e); } } @@ -1711,8 +1664,8 @@ private void waitForEndTime(final long expectedEndTime, final Scheduler timeSour } /** - * Refresh all the update sources within an {@link LogicalClock update cycle} after the UGP has been locked. At the - * end of the updates all {@link Notification notifications} will be flushed. 
+ * Refresh all the update sources within an {@link LogicalClock update cycle} after the UpdateGraph has been locked. + * At the end of the updates all {@link Notification notifications} will be flushed. */ private void refreshAllTables() { refreshRequested.set(false); @@ -1736,18 +1689,18 @@ private void doRefresh(@NotNull final Runnable refreshFunction) { } Assert.eqNull(refreshScope, "refreshScope"); refreshScope = new LivenessScope(); - final long updatingCycleValue = LogicalClock.DEFAULT.startUpdateCycle(); - logDependencies().append("Beginning UpdateGraphProcessor cycle step=") - .append(LogicalClock.DEFAULT.currentStep()).endl(); + final long updatingCycleValue = logicalClock.startUpdateCycle(); + logDependencies().append("Beginning PeriodicUpdateGraph cycle step=") + .append(logicalClock.currentStep()).endl(); try (final SafeCloseable ignored = LivenessScopeStack.open(refreshScope, true)) { refreshFunction.run(); flushNotificationsAndCompleteCycle(); } finally { - LogicalClock.DEFAULT.ensureUpdateCycleCompleted(updatingCycleValue); + logicalClock.ensureUpdateCycleCompleted(updatingCycleValue); refreshScope = null; } - logDependencies().append("Completed UpdateGraphProcessor cycle step=") - .append(LogicalClock.DEFAULT.currentStep()).endl(); + logDependencies().append("Completed PeriodicUpdateGraph cycle step=") + .append(logicalClock.currentStep()).endl(); }); } @@ -1804,9 +1757,9 @@ public LogEntry logDependencies() { } } - private class UpdateGraphProcessorThreadFactory extends NamingThreadFactory { - private UpdateGraphProcessorThreadFactory(@NotNull final ThreadGroup threadGroup, @NotNull final String name) { - super(threadGroup, UpdateGraphProcessor.class, name, true); + private class NotificationProcessorThreadFactory extends NamingThreadFactory { + private NotificationProcessorThreadFactory(@NotNull final ThreadGroup threadGroup, @NotNull final String name) { + super(threadGroup, PeriodicUpdateGraph.class, name, true); } @Override @@ -1822,7 +1775,7 
@@ public Thread newThread(@NotNull final Runnable r) { private void ensureUnlocked(@NotNull final String callerDescription, @Nullable final List errors) { if (exclusiveLock().isHeldByCurrentThread()) { if (errors != null) { - errors.add(callerDescription + ": UGP exclusive lock is still held"); + errors.add(callerDescription + ": UpdateGraph exclusive lock is still held"); } while (exclusiveLock().isHeldByCurrentThread()) { exclusiveLock().unlock(); @@ -1830,7 +1783,7 @@ private void ensureUnlocked(@NotNull final String callerDescription, @Nullable f } if (sharedLock().isHeldByCurrentThread()) { if (errors != null) { - errors.add(callerDescription + ": UGP shared lock is still held"); + errors.add(callerDescription + ": UpdateGraph shared lock is still held"); } while (sharedLock().isHeldByCurrentThread()) { sharedLock().unlock(); @@ -1839,34 +1792,145 @@ private void ensureUnlocked(@NotNull final String callerDescription, @Nullable f } private ExecutorService makeUnitTestRefreshExecutor() { - return Executors.newFixedThreadPool(1, new UnitTestRefreshThreadFactory()); + return Executors.newFixedThreadPool(1, new UnitTestThreadFactory()); } @TestUseOnly - private class UnitTestRefreshThreadFactory extends NamingThreadFactory { + private class UnitTestThreadFactory extends NamingThreadFactory { - private UnitTestRefreshThreadFactory() { - super(UpdateGraphProcessor.class, "unitTestRefresh"); + private UnitTestThreadFactory() { + super(PeriodicUpdateGraph.class, "unitTestRefresh"); } @Override - public Thread newThread(@NotNull final Runnable runnable) { - final Thread thread = super.newThread(runnable); - final Thread.UncaughtExceptionHandler existing = thread.getUncaughtExceptionHandler(); - thread.setUncaughtExceptionHandler((final Thread errorThread, final Throwable throwable) -> { - ensureUnlocked("unit test run pool thread exception handler", null); - existing.uncaughtException(errorThread, throwable); + public Thread newThread(@NotNull final Runnable r) { + 
return super.newThread(() -> { + configureUnitTestRefreshThread(); + r.run(); }); - return thread; } } /** - * Configure the primary UGP thread or one of the auxiliary run threads. + * Configure the primary UpdateGraph thread or one of the auxiliary notification processing threads. */ private void configureRefreshThread() { SystemicObjectTracker.markThreadSystemic(); MultiChunkPool.enableDedicatedPoolForThisThread(); - isRefreshThread.set(true); + isUpdateThread.set(true); + // Install this UpdateGraph via ExecutionContext for refresh threads + // noinspection resource + ExecutionContext.newBuilder().setUpdateGraph(this).build().open(); + } + + /** + * Configure threads to be used for unit test processing. + */ + private void configureUnitTestRefreshThread() { + final Thread currentThread = Thread.currentThread(); + final Thread.UncaughtExceptionHandler existing = currentThread.getUncaughtExceptionHandler(); + currentThread.setUncaughtExceptionHandler((final Thread errorThread, final Throwable throwable) -> { + ensureUnlocked("unit test run pool thread exception handler", null); + existing.uncaughtException(errorThread, throwable); + }); + isUpdateThread.set(true); + // Install this UpdateGraph via ExecutionContext for refresh threads + // noinspection resource + ExecutionContext.newBuilder().setUpdateGraph(this).build().open(); + } + + public void takeAccumulatedCycleStats(AccumulatedCycleStats updateGraphAccumCycleStats) { + accumulatedCycleStats.take(updateGraphAccumCycleStats); + } + + public static final class Builder { + private final boolean allowUnitTestMode = + Configuration.getInstance().getBooleanWithDefault(ALLOW_UNIT_TEST_MODE_PROP, false); + private long targetCycleDurationMillis = + Configuration.getInstance().getIntegerWithDefault(DEFAULT_TARGET_CYCLE_DURATION_MILLIS_PROP, 1000); + private long minimumCycleDurationToLogNanos = TimeUnit.MILLISECONDS.toNanos( + 
Configuration.getInstance().getIntegerWithDefault(MINIMUM_CYCLE_DURATION_TO_LOG_MILLIS_PROP, 25)); + + private String name; + private int numUpdateThreads = -1; + + public Builder(String name) { + this.name = name; + } + + /** + * Set the target duration of an update cycle, including the updating phase and the idle phase. This is also the + * target interval between the start of one cycle and the start of the next. + * + * @implNote Any target cycle duration {@code < 0} will be clamped to 0. + * + * @param targetCycleDurationMillis The target duration for update cycles in milliseconds + * @return this builder + */ + public Builder targetCycleDurationMillis(long targetCycleDurationMillis) { + this.targetCycleDurationMillis = targetCycleDurationMillis; + return this; + } + + /** + * Set the minimum duration of an update cycle that should be logged at the INFO level. + * + * @param minimumCycleDurationToLogNanos threshold to log a slow cycle + * @return this builder + */ + public Builder minimumCycleDurationToLogNanos(long minimumCycleDurationToLogNanos) { + this.minimumCycleDurationToLogNanos = minimumCycleDurationToLogNanos; + return this; + } + + /** + * Sets the number of threads to use in the update graph processor. Values < 0 indicate to use one thread per + * available processor. + * + * @param numUpdateThreads number of threads to use in update processing + * @return this builder + */ + public Builder numUpdateThreads(int numUpdateThreads) { + this.numUpdateThreads = numUpdateThreads; + return this; + } + + /** + * Constructs and returns a PeriodicUpdateGraph. It is an error to do so an instance already exists with the + * name provided to this builder. 
+ * + * @return the new PeriodicUpdateGraph + * @throws IllegalStateException if a PeriodicUpdateGraph with the provided name already exists + */ + public PeriodicUpdateGraph build() { + synchronized (INSTANCES) { + if (INSTANCES.containsKey(name)) { + throw new IllegalStateException( + String.format("PeriodicUpdateGraph with name %s already exists", name)); + } + final PeriodicUpdateGraph newUpdateGraph = construct(); + INSTANCES.put(name, newUpdateGraph); + return newUpdateGraph; + } + } + + /** + * Returns an existing PeriodicUpdateGraph with the name provided to this Builder, if one exists, else returns a + * new PeriodicUpdateGraph. + * + * @return the PeriodicUpdateGraph + */ + public PeriodicUpdateGraph existingOrBuild() { + return INSTANCES.putIfAbsent(name, n -> construct()); + } + + private PeriodicUpdateGraph construct() { + return new PeriodicUpdateGraph( + name, + allowUnitTestMode, + targetCycleDurationMillis, + minimumCycleDurationToLogNanos, + numUpdateThreads); + } } } diff --git a/engine/table/src/main/java/io/deephaven/engine/util/AbstractScriptSession.java b/engine/table/src/main/java/io/deephaven/engine/util/AbstractScriptSession.java index b7be4b04401..47ca7f47442 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/AbstractScriptSession.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/AbstractScriptSession.java @@ -18,6 +18,7 @@ import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.context.QueryScopeParam; import io.deephaven.engine.table.hierarchical.HierarchicalTable; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.plugin.type.ObjectType; import io.deephaven.plugin.type.ObjectTypeLookup; import io.deephaven.util.SafeCloseable; @@ -65,7 +66,10 @@ private static void createOrClearDirectory(final File directory) { private final ObjectTypeLookup objectTypeLookup; private final Listener changeListener; - protected AbstractScriptSession(ObjectTypeLookup objectTypeLookup, 
@Nullable Listener changeListener) { + protected AbstractScriptSession( + UpdateGraph updateGraph, + ObjectTypeLookup objectTypeLookup, + @Nullable Listener changeListener) { this.objectTypeLookup = objectTypeLookup; this.changeListener = changeListener; @@ -82,6 +86,7 @@ protected AbstractScriptSession(ObjectTypeLookup objectTypeLookup, @Nullable Lis .newQueryLibrary() .setQueryScope(queryScope) .setQueryCompiler(compilerContext) + .setUpdateGraph(updateGraph) .build(); } diff --git a/engine/table/src/main/java/io/deephaven/engine/util/GroovyDeephavenSession.java b/engine/table/src/main/java/io/deephaven/engine/util/GroovyDeephavenSession.java index cbbf31efead..edb39bf3059 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/GroovyDeephavenSession.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/GroovyDeephavenSession.java @@ -14,9 +14,9 @@ import io.deephaven.engine.context.QueryCompiler; import io.deephaven.configuration.Configuration; import io.deephaven.engine.exceptions.CancellationException; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.context.QueryScope; import io.deephaven.api.util.NameValidator; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.GroovyDeephavenSession.GroovySnapshot; import io.deephaven.engine.util.scripts.ScriptPathLoader; import io.deephaven.engine.util.scripts.ScriptPathLoaderState; @@ -123,17 +123,20 @@ private String getNextScriptClassName() { private transient SourceClosure sourceClosure; private transient SourceClosure sourceOnceClosure; - public GroovyDeephavenSession(ObjectTypeLookup objectTypeLookup, final RunScripts runScripts) - throws IOException { - this(objectTypeLookup, null, runScripts); + public GroovyDeephavenSession( + final UpdateGraph updateGraph, + final ObjectTypeLookup objectTypeLookup, + final RunScripts runScripts) throws IOException { + this(updateGraph, objectTypeLookup, null, runScripts); } public 
GroovyDeephavenSession( + final UpdateGraph updateGraph, ObjectTypeLookup objectTypeLookup, @Nullable final Listener changeListener, final RunScripts runScripts) throws IOException { - super(objectTypeLookup, changeListener); + super(updateGraph, objectTypeLookup, changeListener); this.scriptFinder = new ScriptFinder(DEFAULT_SCRIPT_PATH); @@ -225,7 +228,8 @@ protected void evaluate(String command, String scriptName) { updateClassloader(lastCommand); try { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLockedInterruptibly(() -> evaluateCommand(lastCommand)); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .doLockedInterruptibly(() -> evaluateCommand(lastCommand)); } catch (InterruptedException e) { throw new CancellationException(e.getMessage() != null ? e.getMessage() : "Query interrupted", maybeRewriteStackTrace(scriptName, currentScriptName, e, lastCommand, commandPrefix)); diff --git a/engine/table/src/main/java/io/deephaven/engine/util/NoLanguageDeephavenSession.java b/engine/table/src/main/java/io/deephaven/engine/util/NoLanguageDeephavenSession.java index 18cf66dfa4d..567d165dff9 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/NoLanguageDeephavenSession.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/NoLanguageDeephavenSession.java @@ -4,6 +4,7 @@ package io.deephaven.engine.util; import io.deephaven.engine.context.QueryScope; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.scripts.ScriptPathLoader; import io.deephaven.engine.util.scripts.ScriptPathLoaderState; import org.jetbrains.annotations.NotNull; @@ -25,12 +26,12 @@ public class NoLanguageDeephavenSession extends AbstractScriptSession variables; - public NoLanguageDeephavenSession() { - this(SCRIPT_TYPE); + public NoLanguageDeephavenSession(final UpdateGraph updateGraph) { + this(updateGraph, SCRIPT_TYPE); } - public NoLanguageDeephavenSession(final String scriptType) { - super(null, null); + public 
NoLanguageDeephavenSession(final UpdateGraph updateGraph, final String scriptType) { + super(updateGraph, null, null); this.scriptType = scriptType; variables = new LinkedHashMap<>(); diff --git a/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java b/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java index f90f500fe90..80473c8d827 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/TableTools.java @@ -8,6 +8,7 @@ import io.deephaven.base.clock.Clock; import io.deephaven.base.verify.Require; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSetFactory; @@ -15,7 +16,6 @@ import io.deephaven.engine.table.impl.perf.QueryPerformanceRecorder; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.TimeTable; import io.deephaven.engine.table.impl.replay.Replayer; @@ -847,7 +847,8 @@ public static Table timeTable(long periodNanos) { * @return time table */ public static Table timeTable(long periodNanos, ReplayerInterface replayer) { - return new TimeTable(UpdateGraphProcessor.DEFAULT, Replayer.getClock(replayer), null, periodNanos, false); + return new TimeTable(ExecutionContext.getContext().getUpdateGraph(), Replayer.getClock(replayer), + null, periodNanos, false); } /** @@ -858,7 +859,8 @@ public static Table timeTable(long periodNanos, ReplayerInterface replayer) { * @return time table */ public static Table timeTable(Instant startTime, long periodNanos) { - return new TimeTable(UpdateGraphProcessor.DEFAULT, DateTimeUtils.currentClock(), startTime, periodNanos, false); + return new 
TimeTable(ExecutionContext.getContext().getUpdateGraph(), DateTimeUtils.currentClock(), + startTime, periodNanos, false); } /** @@ -870,7 +872,8 @@ public static Table timeTable(Instant startTime, long periodNanos) { * @return time table */ public static Table timeTable(Instant startTime, long periodNanos, ReplayerInterface replayer) { - return new TimeTable(UpdateGraphProcessor.DEFAULT, Replayer.getClock(replayer), startTime, periodNanos, false); + return new TimeTable(ExecutionContext.getContext().getUpdateGraph(), Replayer.getClock(replayer), + startTime, periodNanos, false); } /** @@ -905,7 +908,7 @@ public static Table timeTable(String startTime, long periodNanos, ReplayerInterf * @return time table */ public static Table timeTable(Clock clock, Instant startTime, long periodNanos) { - return new TimeTable(UpdateGraphProcessor.DEFAULT, clock, startTime, periodNanos, false); + return new TimeTable(ExecutionContext.getContext().getUpdateGraph(), clock, startTime, periodNanos, false); } /** diff --git a/engine/table/src/main/java/io/deephaven/engine/util/TickSuppressor.java b/engine/table/src/main/java/io/deephaven/engine/util/TickSuppressor.java index 5f37b0dd5c9..38b65ec5c77 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/TickSuppressor.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/TickSuppressor.java @@ -6,7 +6,6 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.table.*; import io.deephaven.engine.table.impl.TableUpdateImpl; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.*; import io.deephaven.chunk.util.hashing.ChunkEquals; import io.deephaven.chunk.*; @@ -46,7 +45,7 @@ public static Table convertModificationsToAddsAndRemoves(Table input) { return input; } - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + input.getUpdateGraph().checkInitiateSerialTableOperation(); final QueryTable resultTable = new QueryTable(input.getDefinition(), 
input.getRowSet(), input.getColumnSourceMap()); @@ -94,7 +93,7 @@ public static Table removeSpuriousModifications(Table input) { return input; } - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + input.getUpdateGraph().checkInitiateSerialTableOperation(); final QueryTable coalesced = (QueryTable) input.coalesce(); diff --git a/engine/table/src/main/java/io/deephaven/engine/util/ToMapListener.java b/engine/table/src/main/java/io/deephaven/engine/util/ToMapListener.java index 3bbc7652d1f..7470fb1458e 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/ToMapListener.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/ToMapListener.java @@ -3,9 +3,9 @@ */ package io.deephaven.engine.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableUpdate; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.perf.QueryPerformanceRecorder; import io.deephaven.engine.table.impl.InstrumentedTableUpdateListenerAdapter; import io.deephaven.engine.table.ColumnSource; @@ -54,20 +54,17 @@ public static ToMapListener make(Table source, String keySourceName, String valu public static ToMapListener make(Table source, ColumnSource keySource, ColumnSource valueSource) { - // noinspection unchecked return QueryPerformanceRecorder.withNugget("ToMapListener", () -> new ToMapListener<>(source, keySource, valueSource)); } public static ToMapListener make(Table source, LongFunction keyProducer, LongFunction prevKeyProducer, LongFunction valueProducer, LongFunction prevValueProducer) { - // noinspection unchecked return QueryPerformanceRecorder.withNugget("ToMapListener", () -> new ToMapListener<>(source, keyProducer, prevKeyProducer, valueProducer, prevValueProducer)); } private ToMapListener(Table source, String keySourceName, String valueSourceName) { - // noinspection unchecked this(source, source.getColumnSource(keySourceName), 
source.getColumnSource(valueSourceName)); } @@ -107,7 +104,7 @@ public void onUpdate(final TableUpdate upstream) { upstream.modified().forAllRowKeys(adder); currentMap = newMap; - UpdateGraphProcessor.DEFAULT.addNotification(new Flusher()); + getUpdateGraph().addNotification(new Flusher()); } @Override @@ -158,7 +155,7 @@ public T get(K key, ColumnSource cs) { * @return the value associated with key */ public T get(K key, LongFunction valueProducer, LongFunction prevValueProducer) { - final LogicalClock.State state = LogicalClock.DEFAULT.currentState(); + final LogicalClock.State state = getUpdateGraph().clock().currentState(); final TObjectLongHashMap map; if (state == LogicalClock.State.Idle && (map = currentMap) != null) { final long row = map.get(key); diff --git a/engine/table/src/main/java/io/deephaven/engine/util/WindowCheck.java b/engine/table/src/main/java/io/deephaven/engine/util/WindowCheck.java index c060997fbe7..e4204268d2f 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/WindowCheck.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/WindowCheck.java @@ -22,12 +22,12 @@ import io.deephaven.engine.table.TableUpdate; import io.deephaven.engine.table.impl.TableUpdateImpl; import io.deephaven.engine.table.impl.sources.ReinterpretUtils; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.*; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; import io.deephaven.base.RAPriQueue; import gnu.trove.map.hash.TLongObjectHashMap; @@ -41,6 +41,7 @@ * Adds a Boolean column that is true if a Timestamp is within the specified window. 
*/ public class WindowCheck { + private WindowCheck() {} /** @@ -79,12 +80,14 @@ private WindowListenerRecorder(Table parent, BaseTable dependent) { * See {@link WindowCheck#addTimeWindow(QueryTable, String, long, String)} for a description, the internal version * gives you access to the TimeWindowListener for unit testing purposes. * - * @param addToMonitor should we add this to the UpdateGraphProcessor + * @param addToMonitor should we add this to the PeriodicUpdateGraph * @return a pair of the result table and the TimeWindowListener that drives it */ static Pair addTimeWindowInternal(Clock clock, QueryTable table, String timestampColumn, long windowNanos, String inWindowColumn, boolean addToMonitor) { - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + if (table.isRefreshing()) { + table.getUpdateGraph().checkInitiateSerialTableOperation(); + } final Map> resultColumns = new LinkedHashMap<>(table.getColumnSourceMap()); final InWindowColumnSource inWindowColumnSource; @@ -107,7 +110,7 @@ static Pair addTimeWindowInternal(Clock clock, QueryT result.addParentReference(timeWindowListener); result.manage(table); if (addToMonitor) { - UpdateGraphProcessor.DEFAULT.addSource(timeWindowListener); + result.getUpdateGraph().addSource(timeWindowListener); } return new Pair<>(result, timeWindowListener); } @@ -116,7 +119,7 @@ static Pair addTimeWindowInternal(Clock clock, QueryT * The TimeWindowListener maintains a priority queue of rows that are within a configured window, when they pass out * of the window, the InWindow column is set to false and a modification tick happens. * - * It implements {@link Runnable}, so that we can be inserted into the {@link UpdateGraphProcessor}. + * It implements {@link Runnable}, so that we can be inserted into the {@link PeriodicUpdateGraph}. 
*/ static class TimeWindowListener extends MergedListener implements Runnable { private final InWindowColumnSource inWindowColumnSource; @@ -396,7 +399,8 @@ void validateQueue() { @Override public void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + UpdateGraph updateGraph = result.getUpdateGraph(); + updateGraph.removeSource(this); } } @@ -419,15 +423,18 @@ private static class InWindowColumnSource extends AbstractColumnSource private final long windowNanos; private final ColumnSource timeStampSource; - private long prevTime = 0; - private long currentTime = 0; - private long clockStep = LogicalClock.DEFAULT.currentStep(); - private final long initialStep = clockStep; + private long prevTime; + private long currentTime; + private long clockStep; + private final long initialStep; InWindowColumnSource(Table table, String timestampColumn, long windowNanos) { super(Boolean.class); this.windowNanos = windowNanos; + clockStep = updateGraph.clock().currentStep(); + initialStep = clockStep; + final ColumnSource timeStampSource = table.getColumnSource(timestampColumn); if (!Instant.class.isAssignableFrom(timeStampSource.getType())) { throw new IllegalArgumentException(timestampColumn + " is not of type Instant!"); @@ -481,7 +488,7 @@ public boolean isImmutable() { private void captureTime() { prevTime = currentTime; currentTime = getTimeNanos(); - clockStep = LogicalClock.DEFAULT.currentStep(); + clockStep = updateGraph.clock().currentStep(); } @Override @@ -537,7 +544,7 @@ public void fillPrevChunk( } private long timeStampForPrev() { - final long currentStep = LogicalClock.DEFAULT.currentStep(); + final long currentStep = updateGraph.clock().currentStep(); return (clockStep < currentStep || clockStep == initialStep) ? 
currentTime : prevTime; } } diff --git a/engine/table/src/main/java/io/deephaven/engine/util/config/MutableInputTable.java b/engine/table/src/main/java/io/deephaven/engine/util/config/MutableInputTable.java index 88e7706e398..202256ca7ea 100644 --- a/engine/table/src/main/java/io/deephaven/engine/util/config/MutableInputTable.java +++ b/engine/table/src/main/java/io/deephaven/engine/util/config/MutableInputTable.java @@ -89,15 +89,15 @@ default void validateDelete(Table tableToDelete) { } } - // TODO (https://github.com/deephaven/deephaven-core/pull/3506): Update this advice for multiple update graphs, - // and on the blocking delete methods, as well. /** * Write {@code newData} to this table. Added rows with keys that match existing rows will instead replace those * rows, if supported. *

* This method will block until the rows are added. As a result, this method is not suitable for use from a * {@link io.deephaven.engine.table.TableListener table listener} or any other - {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}-dispatched callback. + {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}-dispatched callback + dispatched by this MutableInputTable's {@link io.deephaven.engine.updategraph.UpdateGraph update graph}. It may + be suitable to add from another update graph if doing so does not introduce any cycles. + * + * @param newData The data to write to this table + * @throws IOException If there is an error writing the data + @@ -126,7 +126,9 @@ default void validateDelete(Table tableToDelete) { *

* This method will block until the rows are deleted. As a result, this method is not suitable for use from a * {@link io.deephaven.engine.table.TableListener table listener} or any other - * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}-dispatched callback. + * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}-dispatched callback + * dispatched by this MutableInputTable's {@link io.deephaven.engine.updategraph.UpdateGraph update graph}. It may + * be suitable to delete from another update graph if doing so does not introduce any cycles. * * @param table The rows to delete * @throws IOException If a problem occurred while deleting the rows. @@ -141,7 +143,9 @@ default void delete(Table table) throws IOException { *

* This method will block until the rows are deleted. As a result, this method is not suitable for use from a * {@link io.deephaven.engine.table.TableListener table listener} or any other - * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}-dispatched callback. + * {@link io.deephaven.engine.updategraph.NotificationQueue.Notification notification}-dispatched callback + * dispatched by this MutableInputTable's {@link io.deephaven.engine.updategraph.UpdateGraph update graph}. It may + * be suitable to delete from another update graph if doing so does not introduce any cycles. * * @param table Table containing the rows to delete * @param rowSet The rows to delete diff --git a/engine/table/src/main/java/io/deephaven/stream/StreamToBlinkTableAdapter.java b/engine/table/src/main/java/io/deephaven/stream/StreamToBlinkTableAdapter.java index 61cd5e22441..5cbd99c7cc3 100644 --- a/engine/table/src/main/java/io/deephaven/stream/StreamToBlinkTableAdapter.java +++ b/engine/table/src/main/java/io/deephaven/stream/StreamToBlinkTableAdapter.java @@ -14,7 +14,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.table.impl.TableUpdateImpl; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.updategraph.UpdateSourceRegistrar; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.impl.QueryTable; @@ -42,7 +42,7 @@ * Adapter for converting streams of data into columnar Deephaven {@link Table tables} that conform to * {@link Table#BLINK_TABLE_ATTRIBUTE blink table} semantics. * - * @implNote The constructor publishes {@code this} to the {@link UpdateGraphProcessor} and thus cannot be subclassed. + * @implNote The constructor publishes {@code this} to the {@link PeriodicUpdateGraph} and thus cannot be subclassed. 
*/ public class StreamToBlinkTableAdapter extends ReferenceCountedLivenessNode implements SafeCloseable, StreamConsumer, Runnable { diff --git a/engine/table/src/test/java/io/deephaven/engine/liveness/TestLiveness.java b/engine/table/src/test/java/io/deephaven/engine/liveness/TestLiveness.java index aa103bea36f..388a7c19a03 100644 --- a/engine/table/src/test/java/io/deephaven/engine/liveness/TestLiveness.java +++ b/engine/table/src/test/java/io/deephaven/engine/liveness/TestLiveness.java @@ -3,15 +3,13 @@ */ package io.deephaven.engine.liveness; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.util.SafeCloseable; import junit.framework.TestCase; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; /** @@ -19,28 +17,8 @@ */ public class TestLiveness { - private boolean oldCheckUgp; - private LivenessScope scope; - private SafeCloseable executionContext; - - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - oldCheckUgp = UpdateGraphProcessor.DEFAULT.setCheckTableOperations(false); - scope = new LivenessScope(); - LivenessScopeStack.push(scope); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @After - public void tearDown() throws Exception { - LivenessScopeStack.pop(scope); - scope.release(); - UpdateGraphProcessor.DEFAULT.setCheckTableOperations(oldCheckUgp); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - executionContext.close(); - } + @Rule + public final EngineCleanup framework = new EngineCleanup(); @Test public void testRecursion() { diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/BenchmarkPlaypen.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/BenchmarkPlaypen.java index b5036a8f395..2ac7160ea45 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/BenchmarkPlaypen.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/BenchmarkPlaypen.java @@ -3,9 +3,11 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.io.logger.StreamLoggerImpl; import io.deephaven.parquet.table.ParquetTools; @@ -19,6 +21,8 @@ */ public class BenchmarkPlaypen { public static void main(String[] args) throws InterruptedException { + TestExecutionContext.createForUnitTests().open(); + if (args.length != 4) { usage(); } @@ -104,7 +108,7 @@ public static void main(String[] args) throws InterruptedException { final AutoTuningIncrementalReleaseFilter filter; if (incremental) { System.out.println("Running test incrementally."); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); + ExecutionContext.getContext().getUpdateGraph().cast().enableUnitTestMode(); filter = new AutoTuningIncrementalReleaseFilter(new StreamLoggerImpl(), 0, 1_000_000L, 1.0, true); input = viewed.where(filter); } else { @@ -144,8 +148,9 @@ public static void main(String[] args) throws InterruptedException { filter.start(); while (viewed.size() > input.size()) { final long initialSize = input.size(); - System.out.println("Running UpdateGraphProcessor cycle: " + input.size() + " / " + viewed.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(filter::run); + System.out.println("Running 
PeriodicUpdateGraph cycle: " + input.size() + " / " + viewed.size()); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(filter::run); if (initialSize == input.size()) { throw new RuntimeException("Did not increase size of input table during cycle!"); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableAggregationTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableAggregationTest.java index 01d5ea1f96a..6dda985d08b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableAggregationTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableAggregationTest.java @@ -4,14 +4,15 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.rowset.TrackingWritableRowSet; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.SortedBy; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.sources.RedirectedColumnSource; @@ -113,9 +114,10 @@ private void doOperatorTest(@NotNull final UnaryOperator

operator, final ? RowSetFactory.empty() : RowSetFactory.fromRange(0, refreshSize - 1); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.startCycleForUnitTests(); try { - UpdateGraphProcessor.DEFAULT.refreshUpdateSourceForUnitTests(() -> { + updateGraph.refreshUpdateSourceForUnitTests(() -> { if (normalStepInserted.isNonempty()) { normal.getRowSet().writableCast().insert(normalStepInserted); normal.notifyListeners( @@ -125,7 +127,7 @@ private void doOperatorTest(@NotNull final UnaryOperator
operator, final } }); final RowSet finalBlinkLastInserted = blinkLastInserted; - UpdateGraphProcessor.DEFAULT.refreshUpdateSourceForUnitTests(() -> { + updateGraph.refreshUpdateSourceForUnitTests(() -> { if (blinkStepInserted.isNonempty() || finalBlinkLastInserted.isNonempty()) { if (blinkInternalRowSet != null) { blinkInternalRowSet.clear(); @@ -139,7 +141,7 @@ private void doOperatorTest(@NotNull final UnaryOperator
operator, final } }); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); } try { TstUtils.assertTableEquals(expected, addOnlyExpected); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableOperationsTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableOperationsTest.java index a9ce3c166a8..76eab5fbbc1 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableOperationsTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/BlinkTableOperationsTest.java @@ -4,14 +4,15 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.rowset.TrackingWritableRowSet; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.sources.RedirectedColumnSource; import io.deephaven.engine.table.impl.util.*; @@ -113,10 +114,11 @@ private void doOperatorTest( ? 
RowSetFactory.empty() : RowSetFactory.fromRange(0, refreshSize - 1); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.startCycleForUnitTests(); try { final RowSet finalNormalLastInserted = normalLastInserted; - UpdateGraphProcessor.DEFAULT.refreshUpdateSourceForUnitTests(() -> { + updateGraph.refreshUpdateSourceForUnitTests(() -> { if (normalStepInserted.isNonempty() || finalNormalLastInserted.isNonempty()) { normal.getRowSet().writableCast().update(normalStepInserted, finalNormalLastInserted); normal.notifyListeners(new TableUpdateImpl(normalStepInserted.copy(), finalNormalLastInserted, @@ -124,7 +126,7 @@ private void doOperatorTest( } }); final RowSet finalBlinkLastInserted = blinkLastInserted; - UpdateGraphProcessor.DEFAULT.refreshUpdateSourceForUnitTests(() -> { + updateGraph.refreshUpdateSourceForUnitTests(() -> { if (blinkStepInserted.isNonempty() || finalBlinkLastInserted.isNonempty()) { if (blinkInternalRowSet != null) { blinkInternalRowSet.clear(); @@ -137,7 +139,7 @@ private void doOperatorTest( } }); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); } try { TstUtils.assertTableEquals(expected, blinkExpected); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/FuzzerTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/FuzzerTest.java index 73de3720c1b..5f321b33854 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/FuzzerTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/FuzzerTest.java @@ -7,20 +7,20 @@ import io.deephaven.base.clock.Clock; import io.deephaven.chunk.util.pools.ChunkPoolReleaseTracking; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.liveness.LivenessScopeStack; import 
io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; -import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.table.impl.util.RuntimeMemory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.TestClock; import io.deephaven.plugin.type.ObjectTypeLookup.NoOp; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.util.GroovyDeephavenSession; import io.deephaven.engine.util.GroovyDeephavenSession.RunScripts; -import io.deephaven.engine.liveness.LivenessScopeStack; -import io.deephaven.engine.table.impl.util.RuntimeMemory; -import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.test.types.SerialTest; import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.Nullable; @@ -74,7 +74,8 @@ private GroovyDeephavenSession getGroovySession() throws IOException { } private GroovyDeephavenSession getGroovySession(@Nullable Clock clock) throws IOException { - final GroovyDeephavenSession session = new GroovyDeephavenSession(NoOp.INSTANCE, RunScripts.serviceLoader()); + final GroovyDeephavenSession session = new GroovyDeephavenSession( + ExecutionContext.getContext().getUpdateGraph(), NoOp.INSTANCE, RunScripts.serviceLoader()); session.getExecutionContext().open(); return session; } @@ -117,9 +118,10 @@ private void testFuzzerScriptFile(final long timeSeed, String s, boolean realtim final int steps = TstUtils.SHORT_TESTS ? 
20 : 100; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < steps; ++step) { final int fstep = step; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { System.out.println("Step = " + fstep); timeTable.run(); }); @@ -151,10 +153,11 @@ public void testInterestingFuzzerSeeds() throws IOException, InterruptedExceptio final Map hardReferences = new ConcurrentHashMap<>(); validateBindingTables(session, hardReferences); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); final TimeTable timeTable = (TimeTable) session.getVariable("tt"); for (int step = 0; step < fuzzDescriptor.steps; ++step) { final int fstep = step; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { System.out.println("Step = " + fstep); timeTable.run(); }); @@ -167,7 +170,7 @@ public void testInterestingFuzzerSeeds() throws IOException, InterruptedExceptio // public void testLargeFuzzerSeed() throws IOException, InterruptedException { // final int segmentSize = 50; // for (int firstRun = 0; firstRun < 100; firstRun += segmentSize) { - // UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + // ExecutionContext.getContext().updateGraph().resetForUnitTests(false); // final int lastRun = firstRun + segmentSize - 1; // System.out.println("Performing runs " + firstRun + " to " + lastRun); //// runLargeFuzzerSetWithSeed(1583849877513833000L, firstRun, lastRun); @@ -186,10 +189,11 @@ public void testLargeSetOfFuzzerQueriesRealtime() throws IOException, Interrupte public void testLargeSetOfFuzzerQueriesSimTime() throws IOException, InterruptedException { final long seed1 = Clock.system().currentTimeNanos(); final int iterations = TstUtils.SHORT_TESTS ? 
1 : 5; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (long iteration = 0; iteration < iterations; ++iteration) { for (int segment = 0; segment < 10; segment++) { ChunkPoolReleaseTracking.enableStrict(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + updateGraph.resetForUnitTests(false); try (final SafeCloseable ignored = LivenessScopeStack.open()) { System.out.println("// Segment: " + segment); final int firstRun = segment * 10; @@ -250,9 +254,10 @@ private void runLargeFuzzerSetWithSeed(long mainTestSeed, int firstRun, int last final long loopStart = System.currentTimeMillis(); final TimeTable timeTable = (TimeTable) session.getVariable("tt"); final RuntimeMemory.Sample sample = new RuntimeMemory.Sample(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < stepsToRun; ++step) { final int fstep = step; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(timeTable::run); + updateGraph.runWithinUnitTestCycle(timeTable::run); RuntimeMemory.getInstance().read(sample); final long totalMemory = sample.totalMemory; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/PartitionedTableTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/PartitionedTableTest.java index 480eb4c7f3d..c92aeb0e08f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/PartitionedTableTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/PartitionedTableTest.java @@ -12,7 +12,6 @@ import io.deephaven.datastructures.util.CollectionUtil; import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.liveness.SingletonLivenessManager; import io.deephaven.engine.rowset.RowSet; @@ -26,8 +25,6 @@ import 
io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.generator.SortedLongGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.io.logger.StreamLoggerImpl; @@ -82,7 +79,8 @@ public void testMergeSimple() { assertTableEquals(mergedByK, withK); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3, 9), col("Sym", "cc", "cc"), col("intCol", 30, 90), col("doubleCol", 2.3, 2.9)); queryTable.notifyListeners(i(3, 9), i(), i()); }); @@ -116,7 +114,8 @@ public void testMergePopulate() { assertTableEquals(mergedByK, withK); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3, 9), col("Sym", "cc", "cc"), col("intCol", 30, 90), col("doubleCol", 2.3, 2.9)); queryTable.notifyListeners(i(3, 9), i(), i()); }); @@ -245,7 +244,8 @@ public Table e() { } public void testTransformPartitionedTableThenMerge() { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); final QueryTable sourceTable = testRefreshingTable(i(1).toTracking(), intCol("Key", 1), intCol("Sentinel", 1), col("Sym", "a"), doubleCol("DoubleCol", 1.1)); @@ -283,7 +283,7 @@ protected Table e() { final int iterations = SHORT_TESTS ? 
40 : 100; for (int ii = 0; ii < iterations; ++ii) { final int iteration = ii + 1; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final long baseLocation = iteration * 10L; final RowSet addRowSet = RowSetFactory.fromRange(baseLocation, baseLocation + 4); final int[] sentinels = @@ -366,12 +366,13 @@ public void testJoinSanity() { final Table mergedResult = result.target().merge(); TableTools.show(mergedResult); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.startCycleForUnitTests(); addToTable(left, i(8), col("USym", "bb"), col("Sym", "aa_1"), col("LeftSentinel", 80)); allowingError(() -> { left.notifyListeners(i(8), i(), i()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); }, throwables -> { // We should deliver a failure to every dependent node TestCase.assertTrue(getUpdateErrors().size() > 0); @@ -393,18 +394,20 @@ public void testDependencies() { final Table aa2 = aa.update("S2=Sentinel * 2"); TableTools.show(aa2); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( - () -> TestCase.assertTrue(aa2.satisfied(LogicalClock.DEFAULT.currentStep()))); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> TestCase.assertTrue(aa2.satisfied(updateGraph.clock().currentStep()))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // We need to flush one notification: one for the source table because we do not require an intermediate + // view table in this case + updateGraph.runWithinUnitTestCycle(() -> { addToTable(sourceTable, i(8), col("USym", "bb"), col("Sentinel", 80)); sourceTable.notifyListeners(i(8), i(), i()); - TestCase.assertFalse(aa2.satisfied(LogicalClock.DEFAULT.currentStep())); + 
TestCase.assertFalse(aa2.satisfied(updateGraph.clock().currentStep())); // We need to flush one notification: one for the source table because we do not require an intermediate // view table in this case - final boolean flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + final boolean flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(aa2.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(aa2.satisfied(updateGraph.clock().currentStep())); }); } @@ -447,7 +450,8 @@ synchronized void pause() { } public void testCrossDependencies() { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 2, 0, 0); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 2, 0, 0); final QueryTable sourceTable = testRefreshingTable(i(1, 2).toTracking(), col("USym", "aa", "bb"), @@ -471,6 +475,7 @@ public void testCrossDependencies() { .captureQueryScopeVars("pauseHelper2") .captureQueryLibrary() .captureQueryCompiler() + .captureUpdateGraph() .build(); final PartitionedTable result2 = sourceTable2.update("SlowItDown=pauseHelper.pauseValue(k)").partitionBy("USym2").transform( @@ -487,7 +492,7 @@ public void testCrossDependencies() { }); final Table merged = joined.merge(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); addToTable(sourceTable, i(3), col("USym", "cc"), col("Sentinel", 30)); addToTable(sourceTable2, i(7, 9), col("USym2", "cc", "dd"), col("Sentinel2", 70, 90)); System.out.println("Launching Notifications"); @@ -505,7 +510,7 @@ public void testCrossDependencies() { System.out.println("Released."); }).start(); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); pauseHelper2.pause(); @@ -513,7 +518,7 @@ public void testCrossDependencies() { TableTools.showWithRowSet(sourceTable); 
TableTools.showWithRowSet(sourceTable2); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); addToTable(sourceTable, i(4, 5), col("USym", "cc", "dd"), col("Sentinel", 40, 50)); addToTable(sourceTable2, i(8, 10), col("USym2", "cc", "dd"), col("Sentinel2", 80, 100)); removeRows(sourceTable2, i(7, 9)); @@ -533,13 +538,14 @@ public void testCrossDependencies() { System.out.println("Released."); }).start(); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); TableTools.showWithRowSet(merged); } public void testCrossDependencies2() { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 2, 0, 0); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 2, 0, 0); final QueryTable sourceTable = testRefreshingTable(i(1, 2).toTracking(), col("USym", "aa", "bb"), @@ -560,6 +566,7 @@ public void testCrossDependencies2() { .captureQueryScopeVars("pauseHelper") .captureQueryLibrary() .captureQueryCompiler() + .captureUpdateGraph() .build(); final PartitionedTable result2 = sourceTable2.partitionBy("USym2").transform(executionContext, t -> t.withAttributes(Map.of(BaseTable.TEST_SOURCE_TABLE_ATTRIBUTE, "true")) @@ -573,7 +580,7 @@ public void testCrossDependencies2() { final Table merged = joined.merge(); pauseHelper.pause(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); addToTable(sourceTable, i(5), col("USym", "dd"), col("Sentinel", 50)); addToTable(sourceTable2, i(10), col("USym2", "dd"), col("Sentinel2", 100)); removeRows(sourceTable2, i(9)); @@ -592,7 +599,7 @@ public void testCrossDependencies2() { System.out.println("Released."); }).start(); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); TableTools.showWithRowSet(merged); } @@ -616,7 +623,7 @@ private void 
testPartitionedTableScope(boolean refreshing) { final SingletonLivenessManager manager = new SingletonLivenessManager(partitionedTable); - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(scopeCloseable::close); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(scopeCloseable::close); if (refreshing) { org.junit.Assert.assertTrue(partitionedTable.tryRetainReference()); @@ -629,9 +636,9 @@ private void testPartitionedTableScope(boolean refreshing) { final SafeCloseable scopeCloseable2 = LivenessScopeStack.open(); final Table valueAgain = partitionedTable.constituentFor("A"); assertSame(value, valueAgain); - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(scopeCloseable2::close); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(scopeCloseable2::close); - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(manager::release); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(manager::release); org.junit.Assert.assertFalse(value.tryRetainReference()); org.junit.Assert.assertFalse(partitionedTable.tryRetainReference()); @@ -775,9 +782,10 @@ public void testMergeConstituentChanges() { final ModifiedColumnSet modifiedColumnSet = base.getModifiedColumnSetForUpdates(); modifiedColumnSet.clear(); modifiedColumnSet.setAll("II"); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (step.incrementAndGet() <= 100) { final boolean evenStep = step.longValue() % 2 == 0; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { base.notifyListeners(new TableUpdateImpl( RowSetFactory.empty(), RowSetFactory.empty(), @@ -810,7 +818,8 @@ public void testMergeStaticAndRefreshing() { refreshingTable.setRefreshing(true); final Table mergedTable = PartitionedTableFactory.ofTables(staticTable, refreshingTable).merge(); assertTableEquals(mergedTable, emptyTable(200).update("II=ii")); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { mergedTable.getRowSet().writableCast().removeRange(0, 1); ((BaseTable) mergedTable).notifyListeners(i(), RowSetFactory.fromRange(0, 1), i()); }); @@ -843,9 +852,10 @@ private EvalNugget newExecutionContextNugget( protected Table e() { // note we cannot reuse the execution context and remove the values as the table is built each iteration try (final SafeCloseable ignored = ExecutionContext.newBuilder() + .newQueryScope() .captureQueryCompiler() .captureQueryLibrary() - .newQueryScope() + .captureUpdateGraph() .build().open()) { ExecutionContext.getContext().getQueryScope().putParam("queryScopeVar", "queryScopeValue"); @@ -892,7 +902,8 @@ public void testExecutionContext() { } public void testTransformDependencyCorrectness() { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 2, 0, 0); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 2, 0, 0); final Table input = emptyTable(2).update("First=ii", "Second=100*ii"); input.setRefreshing(true); @@ -902,7 +913,12 @@ public void testTransformDependencyCorrectness() { filter.getRowSet().writableCast().remove(1); final PartitionedTable partitioned = input.partitionBy("First"); - final ExecutionContext executionContext = TestExecutionContext.createForUnitTests(); + final ExecutionContext executionContext = ExecutionContext.newBuilder() + .emptyQueryScope() + .newQueryLibrary() + .captureUpdateGraph() + .captureQueryCompiler() + .build(); final PartitionedTable transformed = partitioned.transform(executionContext, tableIn -> { final QueryTable tableOut = (QueryTable) tableIn.getSubTable(tableIn.getRowSet()); tableIn.addUpdateListener(new BaseTable.ListenerImpl("Slow Listener", tableIn, tableOut) { @@ -931,13 +947,13 
@@ public void onUpdate(TableUpdate upstream) { TestCase.assertEquals(1, filteredTransformed.table().size()); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); try { ((BaseTable) input).notifyListeners(i(), i(), i(1)); filter.getRowSet().writableCast().insert(1); ((BaseTable) filter).notifyListeners(i(1), i(), i()); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); } TestCase.assertEquals(2, filteredTransformed.table().size()); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java index 5027fda0ee5..7341231d27e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAggregationTest.java @@ -10,6 +10,7 @@ import io.deephaven.api.agg.spec.AggSpec; import io.deephaven.chunk.util.pools.ChunkPoolReleaseTracking; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.rowset.RowSet; @@ -17,7 +18,6 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.rowset.TrackingWritableRowSet; import io.deephaven.engine.table.*; -import io.deephaven.engine.testutil.QueryTableTestBase.TableComparator; import io.deephaven.engine.table.impl.by.*; import io.deephaven.engine.table.impl.indexer.RowSetIndexer; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; @@ -27,14 +27,14 @@ import io.deephaven.engine.table.impl.sources.UnionRedirection; import io.deephaven.engine.table.impl.util.ColumnHolder; import io.deephaven.engine.testutil.*; +import 
io.deephaven.engine.testutil.QueryTableTestBase.TableComparator; import io.deephaven.engine.testutil.generator.*; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.testutil.sources.TestColumnSource; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; -import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.qst.table.AggregateAllTable; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -335,41 +335,42 @@ protected final Table e() { } }; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { inputs[0].getRowSet().writableCast().insertRange(mergeChunkMultiple, 2 * mergeChunkMultiple - 1); inputs[0].notifyListeners(RowSetFactory.fromRange(mergeChunkMultiple, 2 * mergeChunkMultiple - 1), i(), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputs[1].getRowSet().writableCast().removeRange(mergeChunkMultiple - 1_000, mergeChunkMultiple - 1); inputs[1].notifyListeners(i(), RowSetFactory.fromRange(mergeChunkMultiple - 1_000, mergeChunkMultiple - 1), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputs[2].getRowSet().writableCast().insertRange(mergeChunkMultiple, 2 * mergeChunkMultiple - 1); inputs[2].notifyListeners(RowSetFactory.fromRange(mergeChunkMultiple, 2 * mergeChunkMultiple - 1), i(), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { 
inputs[0].getRowSet().writableCast().removeRange(mergeChunkMultiple, 2 * mergeChunkMultiple - 1); inputs[0].notifyListeners(i(), RowSetFactory.fromRange(mergeChunkMultiple, 2 * mergeChunkMultiple - 1), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputs[0].getRowSet().writableCast().removeRange(0, mergeChunkMultiple - 1); inputs[0].notifyListeners(i(), RowSetFactory.fromRange(0, mergeChunkMultiple - 1), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputs[4].getModifiedColumnSetForUpdates().clear(); inputs[4].getModifiedColumnSetForUpdates().setAll("StrCol"); inputs[4].notifyListeners(new TableUpdateImpl(i(), i(), RowSetFactory.fromRange(0, mergeChunkMultiple / 2), @@ -377,7 +378,7 @@ protected final Table e() { }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputs[4].getModifiedColumnSetForUpdates().clear(); inputs[4].getModifiedColumnSetForUpdates().setAll("IntCol"); inputs[4].notifyListeners(new TableUpdateImpl(i(), i(), RowSetFactory.fromRange(0, mergeChunkMultiple / 2), @@ -385,7 +386,7 @@ protected final Table e() { }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputs[4].getModifiedColumnSetForUpdates().clear(); inputs[4].getModifiedColumnSetForUpdates().setAll("TimeCol"); inputs[4].notifyListeners(new TableUpdateImpl(i(), i(), RowSetFactory.fromRange(0, mergeChunkMultiple / 2), @@ -415,43 +416,44 @@ protected Table e() { } }; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { input1.getRowSet().writableCast().removeRange(50, 99); input1.notifyListeners(i(), 
RowSetFactory.fromRange(50, 99), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { input1.getRowSet().writableCast().removeRange(0, 49); input1.notifyListeners(i(), RowSetFactory.fromRange(0, 49), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { input2.getRowSet().writableCast().insertRange(0, 49); input2.notifyListeners(RowSetFactory.fromRange(0, 49), i(), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { input2.getRowSet().writableCast().insertRange(50, 99); input2.notifyListeners(RowSetFactory.fromRange(50, 99), i(), i()); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { input2.notifyListeners(new TableUpdateImpl(i(0, 1), i(0, 1), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY)); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { input2.notifyListeners( new TableUpdateImpl(i(), i(), i(2, 3), RowSetShiftData.EMPTY, ModifiedColumnSet.ALL)); }); validate(ens); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { input2.notifyListeners( new TableUpdateImpl(i(), i(), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY)); }); @@ -730,7 +732,8 @@ public Table e() { return tableGrouped.lastBy("Sym"); } }}; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "aa", "aa"), col("intCol", 20, 10), col("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(7, 9), i(), i()); }); @@ -910,7 +913,8 @@ public Table e() { }, }; 
validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "aa", "bc"), col("USym", "a", "b"), @@ -936,14 +940,14 @@ public void testAddOnlyLastAttribute() { final Table expected = newTable(col("USym", "a", "b"), intCol("intCol", 40, 60)); assertTableEquals(expected, lastBy); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); addToTable(queryTable, i(7, 9), col("USym", "a", "b"), col("intCol", 70, 90)); queryTable.notifyListeners(i(7, 9), i(), i()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); final Table expected2 = newTable(col("USym", "a", "b"), intCol("intCol", 70, 90)); @@ -1041,25 +1045,26 @@ public Table e() { } } }; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "aa", "aa"), col("intCol", 20, 10), col("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(7, 9), i(), i()); }); validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "bc", "bc"), col("intCol", 21, 11), col("doubleCol", 2.2, 2.3)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "aa", "bc"), col("intCol", 20, 15), col("doubleCol", 2.1, 2.3)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); validate(en); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "aa", "bc"), col("intCol", 20, 15), col("doubleCol", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY)); @@ -1067,20 +1072,20 @@ public Table e() { }); validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "aa", "bc"), col("intCol", 20, 15), col("doubleCol", Double.POSITIVE_INFINITY, Double.NaN)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), col("Sym", "aa", "bc"), col("intCol", 20, 15), col("doubleCol", 1.2, 2.2)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(queryTable, i(2, 9)); queryTable.notifyListeners(i(), i(2, 9), i()); }); @@ -1742,7 +1747,8 @@ public void testAbsSumBySimple() { TestCase.assertEquals(expected.doubleValue(), absSumDouble); TestCase.assertEquals(NULL_LONG, DataAccessHelpers.getColumn(result, "BoolCol").getLong(0)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(8), col("BigI", BigInteger.valueOf(5)), col("DoubleCol", 5.0), col("BoolCol", true)); table.notifyListeners(i(8), i(), i()); @@ -1756,7 +1762,7 @@ public void testAbsSumBySimple() { TestCase.assertEquals(expected, absSum); TestCase.assertEquals(expected.doubleValue(), absSumDouble); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2)); table.notifyListeners(i(), i(2), i()); }); @@ 
-1768,7 +1774,7 @@ public void testAbsSumBySimple() { TestCase.assertEquals(expected, absSum); TestCase.assertEquals(expected.doubleValue(), absSumDouble); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(8), col("BigI", BigInteger.valueOf(4)), col("DoubleCol", 4.0), col("BoolCol", false)); table.notifyListeners(i(), i(), i(8)); @@ -1782,7 +1788,7 @@ public void testAbsSumBySimple() { TestCase.assertEquals(expected, absSum); TestCase.assertEquals(expected.doubleValue(), absSumDouble); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(10), col("BigI", BigInteger.valueOf(0)), col("DoubleCol", Double.NaN), col("BoolCol", true)); table.notifyListeners(i(10), i(), i()); @@ -1795,7 +1801,7 @@ public void testAbsSumBySimple() { TestCase.assertEquals(Double.NaN, absSumDouble); TestCase.assertEquals(1L, DataAccessHelpers.getColumn(result, "BoolCol").getLong(0)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(10)); table.notifyListeners(i(), i(10), i()); }); @@ -1807,7 +1813,7 @@ public void testAbsSumBySimple() { TestCase.assertEquals(expected.doubleValue(), absSumDouble); TestCase.assertEquals(0L, DataAccessHelpers.getColumn(result, "BoolCol").getLong(0)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(12, 14), col("BigI", BigInteger.valueOf(0), BigInteger.valueOf(0)), doubleCol("DoubleCol", 0.0, 0.0), col("BoolCol", true, true)); table.notifyListeners(i(12, 14), i(), i()); @@ -1830,7 +1836,8 @@ public void testAbsSumByNull() { float absSumF = DataAccessHelpers.getColumn(result, "FloatCol").getFloat(0); TestCase.assertEquals(QueryConstants.NULL_FLOAT, absSumF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph 
= ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(8), col("IntCol", 5), floatCol("FloatCol", -5.5f)); table.notifyListeners(i(8), i(), i()); }); @@ -1840,7 +1847,7 @@ public void testAbsSumByNull() { TestCase.assertEquals(5L, absSum); TestCase.assertEquals(5.5f, absSumF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(8)); table.notifyListeners(i(), i(8), i()); }); @@ -1866,7 +1873,8 @@ public void testAvgInfinities() { double avgF = DataAccessHelpers.getColumn(result, "FloatCol").getDouble(0); TestCase.assertEquals(Double.NaN, avgF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(8), col("IntCol", 5), floatCol("FloatCol", 5f)); table.notifyListeners(i(8), i(), i()); }); @@ -1876,7 +1884,7 @@ public void testAvgInfinities() { TestCase.assertEquals(5.0, avg); TestCase.assertEquals(5.0, avgF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(9), col("IntCol", 6), floatCol("FloatCol", Float.POSITIVE_INFINITY)); table.notifyListeners(i(9), i(), i()); }); @@ -1886,7 +1894,7 @@ public void testAvgInfinities() { TestCase.assertEquals(5.5, avg); TestCase.assertEquals(Double.POSITIVE_INFINITY, avgF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(10), col("IntCol", 7), floatCol("FloatCol", Float.NEGATIVE_INFINITY)); table.notifyListeners(i(10), i(), i()); }); @@ -1896,7 +1904,7 @@ public void testAvgInfinities() { TestCase.assertEquals(6.0, avg); TestCase.assertEquals(Double.NaN, avgF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { 
removeRows(table, i(9)); table.notifyListeners(i(), i(9), i()); }); @@ -1906,7 +1914,7 @@ public void testAvgInfinities() { TestCase.assertEquals(6.0, avg); TestCase.assertEquals(Double.NEGATIVE_INFINITY, avgF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(10)); addToTable(table, i(11), col("IntCol", 6), floatCol("FloatCol", Float.NaN)); table.notifyListeners(i(11), i(10), i()); @@ -1917,7 +1925,7 @@ public void testAvgInfinities() { TestCase.assertEquals(5.5, avg); TestCase.assertEquals(Double.NaN, avgF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(11)); table.notifyListeners(i(), i(11), i()); }); @@ -1944,7 +1952,8 @@ public void testVarInfinities() { double varF = DataAccessHelpers.getColumn(result, "FloatCol").getDouble(0); TestCase.assertEquals(Double.NaN, varF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 8), col("IntCol", 4, 5), floatCol("FloatCol", 4f, 5f)); table.notifyListeners(i(7, 8), i(), i()); }); @@ -1954,7 +1963,7 @@ public void testVarInfinities() { TestCase.assertEquals(0.5, var); TestCase.assertEquals(0.5, varF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(9), col("IntCol", 6), floatCol("FloatCol", Float.POSITIVE_INFINITY)); table.notifyListeners(i(9), i(), i()); }); @@ -1964,7 +1973,7 @@ public void testVarInfinities() { TestCase.assertEquals(1.0, var); TestCase.assertEquals(Double.NaN, varF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(10), col("IntCol", 7), floatCol("FloatCol", Float.NEGATIVE_INFINITY)); table.notifyListeners(i(10), i(), 
i()); }); @@ -1974,7 +1983,7 @@ public void testVarInfinities() { TestCase.assertEquals(1.0 + 2.0 / 3.0, var, 0.001); TestCase.assertEquals(Double.NaN, varF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(9)); table.notifyListeners(i(), i(9), i()); }); @@ -1984,7 +1993,7 @@ public void testVarInfinities() { TestCase.assertEquals(2.0 + 1.0 / 3.0, var, 0.001); TestCase.assertEquals(Double.NaN, varF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(10)); addToTable(table, i(11), col("IntCol", 6), floatCol("FloatCol", Float.NaN)); table.notifyListeners(i(11), i(10), i()); @@ -1995,7 +2004,7 @@ public void testVarInfinities() { TestCase.assertEquals(1.0, var); TestCase.assertEquals(Double.NaN, varF); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(11)); table.notifyListeners(i(), i(11), i()); }); @@ -2149,7 +2158,8 @@ public void testWeightedAvgByLong() { double expected = (double) wsum / (double) sumw; TestCase.assertEquals(expected, wavg); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(8), col("Long1", (long) Integer.MAX_VALUE), col("Long2", 7L)); table.notifyListeners(i(8), i(), i()); }); @@ -2507,8 +2517,9 @@ public Table e() { new TableComparator(queryTable.minBy("Sym").sort("Sym"), queryTable.applyToAllBy("min(each)", "Sym").sort("Sym")), }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < 50; step++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet keysToAdd = newIndex(random.nextInt(size / 2 + 1), 
queryTable.getRowSet(), random); final ColumnHolder[] columnAdditions = new ColumnHolder[columnInfo.length]; @@ -2863,12 +2874,14 @@ protected void checkDifferences(String msg, Table recomputed) { } }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < 10; ++step) { final int fstep = step; System.out.println("Step = " + step); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Modifies and Adds in post-shift keyspace. + updateGraph.runWithinUnitTestCycle(() -> { final RowSet added = RowSetFactory.fromRange(size * (fstep + 1), size * (fstep + 2) - 1); queryTable.getRowSet().writableCast().insert(added); @@ -2954,7 +2967,8 @@ public void testMedianTypes() { final Table refreshing = updated.medianBy(); final Table refreshingKeys = updated.medianBy("KeyCol"); TableTools.show(updated); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); TableTools.show(updated); TableTools.show(refreshing); @@ -3164,7 +3178,8 @@ public void testSelectDistinctUpdates() { // this should result in an new output row System.out.println("Adding key 4."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7), col("x", 4)); table.notifyListeners(i(7), i(), i()); }); @@ -3180,7 +3195,7 @@ public void testSelectDistinctUpdates() { // we're going to add a duplicate key, which should result in no changes. 
System.out.println("Adding duplicate 1."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { listener.reset(); addToTable(table, i(9), col("x", 1)); table.notifyListeners(i(9), i(), i()); @@ -3193,7 +3208,7 @@ public void testSelectDistinctUpdates() { // now let's remove one of our rows, but not the last one with a given value, also expecting no changes System.out.println("Removing original 1."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { listener.reset(); removeRows(table, i(2)); table.notifyListeners(i(), i(2), i()); @@ -3206,7 +3221,7 @@ public void testSelectDistinctUpdates() { // remove the last instance of 1, which should remove it from the output table System.out.println("Removing last 1."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { listener.reset(); removeRows(table, i(9)); table.notifyListeners(i(), i(9), i()); @@ -3223,7 +3238,7 @@ public void testSelectDistinctUpdates() { // add it back System.out.println("Putting 1 back at place 9."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { listener.reset(); addToTable(table, i(9), col("x", 1)); table.notifyListeners(i(9), i(), i()); @@ -3240,7 +3255,7 @@ public void testSelectDistinctUpdates() { // and modify something, but keep the key the same System.out.println("False churn of key 1 (at 9)."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { listener.reset(); addToTable(table, i(9), col("x", 1)); table.notifyListeners(i(), i(), i(9)); @@ -3255,7 +3270,7 @@ public void testSelectDistinctUpdates() { // now modify it so that we generate a new key, but don't change the existing key's existence // and modify something, but keep the key the same System.out.println("Adding a 5, but not deleting what was at rowSet."); 
- UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { listener.reset(); addToTable(table, i(4), col("x", 5)); table.notifyListeners(i(), i(), i(4)); @@ -3272,7 +3287,7 @@ public void testSelectDistinctUpdates() { // now modify it so that we remove an existing key System.out.println("Adding 5 in a way that deletes 2."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { listener.reset(); addToTable(table, i(8), col("x", 5)); table.notifyListeners(i(), i(), i(8)); @@ -3340,7 +3355,8 @@ public void onUpdate(TableUpdate upstream) { TableTools.showWithRowSet(reversedFlat); TableTools.showWithRowSet(last); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2), intCol("Sentinel", 2)); table.notifyListeners(i(2), i(), i()); }); @@ -3373,7 +3389,8 @@ public void testFirstByShift() { TestCase.assertEquals(4097, DataAccessHelpers.getColumn(lastResult, "Sentinel").getInt(0)); TestCase.assertEquals(2, DataAccessHelpers.getColumn(lastResult, "Sentinel").getInt(1)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(0), intCol("Sentinel", 0), col("Bucket", "C")); table.notifyListeners(i(0), i(), i()); }); @@ -3390,7 +3407,7 @@ public void testFirstByShift() { TestCase.assertEquals(2, DataAccessHelpers.getColumn(lastResult, "Sentinel").getInt(1)); TestCase.assertEquals(0, DataAccessHelpers.getColumn(lastResult, "Sentinel").getInt(2)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { for (int idx = 3; idx < 4097; ++idx) { addToTable(table, i(idx), intCol("Sentinel", idx), 
col("Bucket", "C")); } @@ -3409,7 +3426,7 @@ public void testFirstByShift() { TestCase.assertEquals(2, DataAccessHelpers.getColumn(lastResult, "Sentinel").getInt(1)); TestCase.assertEquals(4096, DataAccessHelpers.getColumn(lastResult, "Sentinel").getInt(2)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { ((TestColumnSource) table.getColumnSource("Sentinel")).shift(0, 4097, 4096); ((TestColumnSource) table.getColumnSource("Bucket")).shift(0, 4097, 4096); table.getRowSet().writableCast().removeRange(0, 4095); @@ -3493,7 +3510,8 @@ public void testIds6220() { final Table byTable = table.where(filter).groupBy("Key"); TableTools.showWithRowSet(byTable); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(filter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(filter::run); TableTools.showWithRowSet(byTable); @@ -3532,19 +3550,20 @@ public void testIds6203() { final String[] keys2 = new String[newSize]; Arrays.fill(keys2, "Key"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet additions = RowSetFactory.fromRange(0, newSize - 1); - addToTable(table, additions, col("Key", keys2), intCol("IntCol", sentinel2)); - table.notifyListeners(additions, i(), i()); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet additions5 = RowSetFactory.fromRange(0, newSize - 1); + addToTable(table, additions5, col("Key", keys2), intCol("IntCol", sentinel2)); + table.notifyListeners(additions5, i(), i()); }); assertTableEquals(table, flat); assertTableEquals(table, subTable); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet removals = RowSetFactory.fromRange(100, 100 + newSize - 1); - removeRows(table, removals); - table.notifyListeners(i(), removals, i()); + 
updateGraph.runWithinUnitTestCycle(() -> { + final RowSet removals6 = RowSetFactory.fromRange(100, 100 + newSize - 1); + removeRows(table, removals6); + table.notifyListeners(i(), removals6, i()); }); assertTableEquals(table, flat); assertTableEquals(table, subTable); @@ -3554,32 +3573,32 @@ public void testIds6203() { } // changed delta - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet additions = RowSetFactory.fromRange(newSize, newSize + newSize - 1); - final RowSet removals = RowSetFactory.fromRange(6000, 6000 + newSize - 3); - addToTable(table, additions, col("Key", keys2), intCol("IntCol", sentinel2)); - removeRows(table, removals); - table.notifyListeners(additions, removals, i()); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet additions4 = RowSetFactory.fromRange(newSize, newSize + newSize - 1); + final RowSet removals5 = RowSetFactory.fromRange(6000, 6000 + newSize - 3); + addToTable(table, additions4, col("Key", keys2), intCol("IntCol", sentinel2)); + removeRows(table, removals5); + table.notifyListeners(additions4, removals5, i()); }); assertTableEquals(table, flat); assertTableEquals(table, subTable); // polarity reversal - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet additions = RowSetFactory.fromRange(newSize * 2, newSize * 3 - 1); - final RowSet removals = RowSetFactory.fromRange(6000 + newSize, 6000 + newSize * 3); - addToTable(table, additions, col("Key", keys2), intCol("IntCol", sentinel2)); - removeRows(table, removals); - table.notifyListeners(additions, removals, i()); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet additions3 = RowSetFactory.fromRange(newSize * 2, newSize * 3 - 1); + final RowSet removals4 = RowSetFactory.fromRange(6000 + newSize, 6000 + newSize * 3); + addToTable(table, additions3, col("Key", keys2), intCol("IntCol", sentinel2)); + removeRows(table, removals4); + table.notifyListeners(additions3, removals4, i()); }); assertTableEquals(table, 
flat); assertTableEquals(table, subTable); // prepare a hole - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet removals = RowSetFactory.fromRange(7000, 7100); - removeRows(table, removals); - table.notifyListeners(i(), removals, i()); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet removals3 = RowSetFactory.fromRange(7000, 7100); + removeRows(table, removals3); + table.notifyListeners(i(), removals3, i()); }); assertTableEquals(table, flat); assertTableEquals(table, subTable); @@ -3589,7 +3608,7 @@ public void testIds6203() { } // intervening keys - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet additions1 = RowSetFactory.fromRange(newSize * 3, newSize * 4 - 1); final RowSet additions2 = RowSetFactory.fromRange(7000, 7000 + newSize - 1); final RowSet removals = RowSetFactory.fromRange(6000 + newSize * 4, 6000 + newSize * 5 - 1); @@ -3606,7 +3625,7 @@ public void testIds6203() { } // intervening keys without reversed polarity - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet removals1 = RowSetFactory.fromRange(0, newSize - 1); final RowSet removals2 = RowSetFactory.fromRange(7000, 7000 + newSize - 1); final RowSet allRemovals = removals1.union(removals2); @@ -3639,7 +3658,8 @@ public void testIds6321() { System.out.println("Starting:"); TableTools.showWithRowSet(exposedLastBy); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(source, i(20), col("Key", "A"), col("Sentinel", 10)); removeRows(source, i(10)); final TableUpdateImpl update = new TableUpdateImpl(); @@ -3656,7 +3676,7 @@ public void testIds6321() { System.out.println("Shifted:"); TableTools.showWithRowSet(exposedLastBy); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(source, i(20), col("Key", "A"), intCol("Sentinel", 20)); source.notifyListeners(i(), i(), i(20)); }); @@ -3677,7 +3697,8 @@ public void testIds6332() { TableTools.show(percentile); TestCase.assertEquals(BigInteger.valueOf(100), DataAccessHelpers.getColumn(percentile, "Value").get(0)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet removeRowSet = RowSetFactory.fromRange(2, 6); removeRows(source, removeRowSet); source.notifyListeners(i(), removeRowSet, i()); @@ -3750,13 +3771,14 @@ public void testInitialGroupsRefreshing() { final Table initialState = aggregated.snapshot(); TestCase.assertEquals(5, aggregated.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { inputRows.insertRange(0, 8); input.notifyListeners(RowSetFactory.fromRange(0, 8), i(), i()); }); TestCase.assertEquals(5, aggregated.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputRows.removeRange(0, 8); input.notifyListeners(i(), RowSetFactory.fromRange(0, 8), i()); }); @@ -3788,13 +3810,14 @@ public void testPreserveEmptyNoKey() { TestCase.assertEquals(1, aggregated.size()); assertTableEquals(expectedEmpty, aggregated); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { inputRows.insertRange(0, 9); input.notifyListeners(RowSetFactory.fromRange(0, 9), i(), i()); }); TestCase.assertEquals(1, aggregated.size()); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { inputRows.removeRange(0, 9); input.notifyListeners(i(), RowSetFactory.fromRange(0, 9), i()); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAjTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAjTest.java index b73a9f6fdae..419cddcce6b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAjTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableAjTest.java @@ -7,16 +7,13 @@ import io.deephaven.base.clock.Clock; import io.deephaven.base.testing.BaseArrayTestCase; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.primitive.iterator.CloseableIterator; import io.deephaven.engine.table.PartitionedTable; -import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.*; -import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.testutil.EvalNuggetInterface; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.select.MatchPairFactory; import io.deephaven.engine.context.QueryScope; @@ -438,7 +435,8 @@ private void tickCheck(Table left, boolean key, final String stampColumn, final new io.deephaven.engine.table.impl.ErrorListener(result1); result1.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(4, 5, 6), stringCol("SingleKey", "Key", 
"Key", "Key"), byteCol("ByteCol", (byte) 4, (byte) 6, (byte) 5), diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinSmallRightBitsTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinSmallRightBitsTest.java index 28dba55d264..acb6b75e13b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinSmallRightBitsTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinSmallRightBitsTest.java @@ -5,26 +5,24 @@ import io.deephaven.api.JoinMatch; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.exceptions.OutOfKeySpaceException; +import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.ModifiedColumnSet; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.engine.rowset.RowSetFactory; -import io.deephaven.engine.rowset.RowSetShiftData; -import io.deephaven.engine.exceptions.OutOfKeySpaceException; import io.deephaven.test.types.OutOfBandTest; import org.apache.commons.lang3.mutable.MutableInt; +import org.junit.experimental.categories.Category; import java.util.Arrays; import java.util.List; -import org.junit.experimental.categories.Category; - +import static io.deephaven.engine.testutil.TstUtils.*; import static io.deephaven.engine.util.TableTools.col; import static io.deephaven.engine.util.TableTools.intCol; -import static io.deephaven.engine.testutil.TstUtils.i; -import static io.deephaven.engine.testutil.TstUtils.testRefreshingTable; -import static io.deephaven.engine.testutil.TstUtils.testTable; import static java.util.Collections.emptyList; @Category(OutOfBandTest.class) @@ -128,7 +126,8 @@ public 
void testLeftGroupChangesOnRightShift() { new io.deephaven.engine.table.impl.SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(lTable, i(1, 2, 3), col("A", 1, 3, 4)); final TableUpdateImpl lUpdate = new TableUpdateImpl(); @@ -187,7 +186,8 @@ public void testLeftGroupChangesOnRightShiftWithAllInnerShifts() { new io.deephaven.engine.table.impl.SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(lTable, i(1, 2, 3), col("A", 1, 3, 4)); final TableUpdateImpl lUpdate = new TableUpdateImpl(); @@ -243,7 +243,8 @@ public void testLeftGroupChangesOnBothShift() { new io.deephaven.engine.table.impl.SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(lTable, i(0)); TstUtils.addToTable(lTable, i(1, 2, 3, 4, 5), col("A", 0, 1, 3, 4, 5)); @@ -305,7 +306,8 @@ public void testLeftGroupChangesOnBothShiftWithInnerShifts() { new io.deephaven.engine.table.impl.SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(lTable, i(0)); TstUtils.addToTable(lTable, i(1, 2, 3, 4, 5), col("A", 0, 1, 3, 4, 5)); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinTestBase.java 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinTestBase.java index 1c9cc324af8..82e7f80c675 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinTestBase.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableCrossJoinTestBase.java @@ -3,35 +3,36 @@ */ package io.deephaven.engine.table.impl; +import com.google.common.collect.Maps; import gnu.trove.list.array.TLongArrayList; import io.deephaven.api.JoinMatch; import io.deephaven.base.verify.Assert; +import io.deephaven.chunk.ResettableWritableIntChunk; +import io.deephaven.chunk.WritableIntChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.datastructures.util.CollectionUtil; -import com.google.common.collect.Maps; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.rowset.RowSetShiftData; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; +import io.deephaven.engine.table.impl.select.MatchPairFactory; import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.engine.table.impl.select.MatchPairFactory; import io.deephaven.engine.util.PrintListener; import io.deephaven.engine.util.TableTools; -import io.deephaven.engine.table.ColumnSource; -import io.deephaven.chunk.*; -import io.deephaven.engine.rowset.RowSetFactory; -import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.test.types.OutOfBandTest; import org.apache.commons.lang3.mutable.MutableInt; import org.apache.commons.lang3.mutable.MutableLong; import org.apache.commons.lang3.mutable.MutableObject; +import org.junit.experimental.categories.Category; import java.util.*; -import 
org.junit.experimental.categories.Category; -import static io.deephaven.engine.util.TableTools.*; import static io.deephaven.engine.testutil.TstUtils.*; +import static io.deephaven.engine.util.TableTools.*; import static java.util.Collections.emptyList; @Category(OutOfBandTest.class) @@ -69,7 +70,7 @@ public void testZeroKeyJoinBitExpansionOnAdd() { new io.deephaven.engine.table.impl.SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { addToTable(rTable, i(1 << 16), longCol("Y", 3)); final TableUpdateImpl update = new TableUpdateImpl(); update.added = i(1 << 16); @@ -105,7 +106,7 @@ public void testZeroKeyJoinBitExpansionOnBoundaryShift() { new io.deephaven.engine.table.impl.SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { removeRows(rTable, i(origIndex)); addToTable(rTable, i(newIndex), longCol("Y", 2)); final TableUpdateImpl update = new TableUpdateImpl(); @@ -144,7 +145,7 @@ public void testZeroKeyJoinBitExpansionWithInnerShift() { new io.deephaven.engine.table.impl.SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { removeRows(rTable, i(128)); addToTable(rTable, i(129, 1 << 16), longCol("Y", 2, 4)); final TableUpdateImpl update = new TableUpdateImpl(); @@ -176,7 +177,9 @@ public void testZeroKeyJoinCompoundShift() { }; TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // left table + // right table + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // left table removeRows(lTable, i(0, 1, 2, 3)); addToTable(lTable, i(2, 4, 5, 7), col("X", "a", "b", 
"c", "d")); @@ -229,7 +232,7 @@ public void testCrossJoinShift() { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { addToTable(right, i(4, 5), intCol("RK", 2, 2), intCol("RS", 40, 50)); right.notifyListeners(i(4, 5), i(), i()); }); @@ -277,7 +280,8 @@ private void testIncrementalZeroKeyJoin(final String ctxt, final int size, final }; for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // left size is sqrt right table size; which is a good update size for the right table + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, leftSize, @@ -473,14 +477,15 @@ public void testStaticVsNaturalJoin2() { assertTableEquals(z3, z); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { xqt.getRowSet().writableCast().insertRange(size, size * 2); xqt.notifyListeners(RowSetFactory.fromRange(size, size * 2), i(), i()); }); assertTableEquals(z3, z); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { yqt.getRowSet().writableCast().insertRange(size, size * 2); yqt.notifyListeners(RowSetFactory.fromRange(size, size * 2), i(), i()); }); @@ -559,8 +564,9 @@ int initialBuildSize() { } }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, @@ -623,8 +629,9 @@ protected void testIncrementalWithKeyColumns(final String ctxt, final int initia TableTools.showWithRowSet(rightStatic); } + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, @@ -698,10 +705,11 @@ int initialBuildSize() { TableTools.showWithRowSet(rightTicking); } + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { final long rightOffset = numSteps.getValue(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftTicking, i(numSteps.getValue()), longCol("intCol", numSteps.getValue())); TableUpdateImpl up = new TableUpdateImpl(); up.shifted = RowSetShiftData.EMPTY; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableFlattenTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableFlattenTest.java index 6e99995218f..70d3467c7a3 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableFlattenTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableFlattenTest.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import 
io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -11,7 +12,6 @@ import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import junit.framework.TestCase; import org.junit.Assert; @@ -258,7 +258,8 @@ void modAndValidate(final Runnable modTable, final RowSet added, final RowSet re final RowSetShiftData shifted) { ++updateCount; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(modTable::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(modTable::run); showWithRowSet(sourceTable); if (listener instanceof SimpleShiftObliviousListener) { @@ -333,7 +334,8 @@ public void testFlattenFollowedBySumBy() { final Table expected = odds.sumBy("B"); final Table actual = odds.flatten().sumBy("B"); assertTableEquals(expected, actual); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(upstream, RowSetFactory.fromRange(100_001, 200_000)); upstream.notifyListeners(RowSetFactory.fromRange(100_001, 200_000), i(), RowSetFactory.fromRange(0, 100_000)); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableJoinTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableJoinTest.java index a17704ac3f5..7470e0b47e2 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableJoinTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableJoinTest.java @@ -4,6 +4,7 @@ package io.deephaven.engine.table.impl; import io.deephaven.datastructures.util.CollectionUtil; +import 
io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; @@ -15,7 +16,6 @@ import io.deephaven.vector.IntVector; import io.deephaven.vector.ObjectVector; import io.deephaven.vector.DoubleVector; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.context.QueryScope; import io.deephaven.util.type.ArrayTypeUtils; import io.deephaven.engine.testutil.junit4.EngineCleanup; @@ -561,15 +561,16 @@ public Table e() { } }; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i())); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i())); TstUtils.validate(en); System.out.println("Notifying listeners of modification."); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i(4, 5))); + updateGraph.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i(4, 5))); System.out.println("Finished notifying listeners of modification."); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(table, i(4)); table.notifyListeners(i(), i(4), i()); }); @@ -632,7 +633,8 @@ public void testAjEmptyRight() { assertEquals(asList(null, null, null, null), asList((Object[]) DataAccessHelpers.getColumn(aj, "RSentinel").getDirect())); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(2), col("Group", "h"), col("LInt", 4), col("LSentinel", "b")); left.notifyListeners(i(), i(), i(2)); }); diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableLeftOuterJoinTestBase.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableLeftOuterJoinTestBase.java index 64d511647ab..a0937517b90 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableLeftOuterJoinTestBase.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableLeftOuterJoinTestBase.java @@ -8,6 +8,7 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.pools.ChunkPoolReleaseTracking; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -16,14 +17,11 @@ import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.select.MatchPairFactory; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.testutil.GenerateTableUpdates; -import io.deephaven.engine.testutil.QueryTableTestBase; -import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.testutil.generator.*; +import io.deephaven.engine.testutil.*; +import io.deephaven.engine.testutil.generator.IntArrayGenerator; +import io.deephaven.engine.testutil.generator.IntGenerator; +import io.deephaven.engine.testutil.generator.StringArrayGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.PrintListener; import io.deephaven.engine.util.TableTools; import io.deephaven.test.types.OutOfBandTest; @@ -138,7 +136,8 @@ public void testZeroKeyJoinBitExpansionOnAdd() { final SimpleListener listener = new SimpleListener(jt); jt.addUpdateListener(listener); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rTable, i(1 << 16), longCol("Y", 3)); final TableUpdateImpl update = new TableUpdateImpl(); update.added = i(1 << 16); @@ -173,7 +172,8 @@ public void testZeroKeyJoinBitExpansionOnBoundaryShift() { final SimpleListener listener = new SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(rTable, i(origIndex)); addToTable(rTable, i(newIndex), longCol("Y", 2)); final TableUpdateImpl update = new TableUpdateImpl(); @@ -211,7 +211,8 @@ public void testZeroKeyJoinBitExpansionWithInnerShift() { final SimpleListener listener = new SimpleListener(jt); jt.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(rTable, i(128)); addToTable(rTable, i(129, 1 << 16), longCol("Y", 2, 4)); final TableUpdateImpl update = new TableUpdateImpl(); @@ -243,7 +244,10 @@ public void testZeroKeyJoinCompoundShift() { }; TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // left table + // right table + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { // left table removeRows(lTable, i(0, 1, 2, 3)); addToTable(lTable, i(2, 4, 5, 7), col("X", "a", "b", "c", "d")); @@ -338,7 +342,9 @@ private void testIncrementalZeroKeyJoin(final String ctxt, final int size, final if (printTableUpdates) { System.out.println("Size = " + size + ", seed=" + seed + ", step = " + 
numSteps.intValue()); } - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // left size is sqrt right table size; which is a good update size for the right table + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, leftSize, @@ -369,7 +375,8 @@ public void testZeroKeyLeftOuterJoinSimple() { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(right, i(0, 1)); right.notifyListeners(i(), i(0, 1), i()); }); @@ -384,7 +391,7 @@ public void testZeroKeyLeftOuterJoinSimple() { assertTableEquals(TableTools.newTable(intCol("LS", 1, 2, 3, 4, 5), intCol("RS", NULL_INT, NULL_INT, NULL_INT, NULL_INT, NULL_INT)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(7), intCol("RS", 30)); right.notifyListeners(i(7), i(), i()); addToTable(left, i(6), intCol("LS", 6)); @@ -402,7 +409,7 @@ public void testZeroKeyLeftOuterJoinSimple() { assertEquals(i(), listener.update.modified()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(7), intCol("RS", 40)); final TableUpdateImpl update = new TableUpdateImpl(i(), i(), i(7), RowSetShiftData.EMPTY, right.newModifiedColumnSet("RS")); @@ -418,7 +425,7 @@ public void testZeroKeyLeftOuterJoinSimple() { assertEquals(i(), listener.update.removed()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(7), intCol("RS", 50)); addToTable(right, i(4), intCol("RS", 60)); right.notifyListeners(new TableUpdateImpl(i(4), i(), i(7), RowSetShiftData.EMPTY, @@ -442,7 +449,7 @@ public void testZeroKeyLeftOuterJoinSimple() { TableTools.showWithRowSet(left); TableTools.showWithRowSet(joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(right, i(4, 7)); addToTable(right, i(2, 3), intCol("RS", 70, 80)); right.notifyListeners(new TableUpdateImpl(i(2, 3), i(4, 7), i(), RowSetShiftData.EMPTY, @@ -478,7 +485,8 @@ public void testZeroKeyTransitions() { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(0, 1), intCol("RS", 1, 2)); right.notifyListeners(i(0, 1), i(), i()); }); @@ -491,7 +499,7 @@ public void testZeroKeyTransitions() { assertTableEquals(TableTools.newTable(intCol("LS", 1, 1), intCol("LS2", 100, 100), intCol("RS", 1, 2)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(right, i(0, 1)); right.notifyListeners(i(), i(0, 1), i()); }); @@ -504,11 +512,11 @@ public void testZeroKeyTransitions() { assertEquals(i(), listener.update.modified()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(10), intCol("LS", 1), intCol("LS2", 101)); - final TableUpdateImpl update = new TableUpdateImpl(i(), i(), i(10), + final TableUpdateImpl update3 = new TableUpdateImpl(i(), i(), i(10), RowSetShiftData.EMPTY, left.newModifiedColumnSet("LS2")); - left.notifyListeners(update); + left.notifyListeners(update3); }); 
assertTableEquals(TableTools.newTable(intCol("LS", 1), intCol("LS2", 101), intCol("RS", NULL_INT)), joined); @@ -521,7 +529,7 @@ public void testZeroKeyTransitions() { assertEquals(i(), listener.update.removed()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(0), intCol("RS", 50)); right.notifyListeners(i(0), i(), i()); }); @@ -535,11 +543,11 @@ public void testZeroKeyTransitions() { assertEquals(i(20), listener.update.removed()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(0), intCol("RS", 60)); - final TableUpdateImpl update = new TableUpdateImpl(i(), i(), i(0), RowSetShiftData.EMPTY, + final TableUpdateImpl update2 = new TableUpdateImpl(i(), i(), i(0), RowSetShiftData.EMPTY, right.newModifiedColumnSet("RS")); - right.notifyListeners(update); + right.notifyListeners(update2); }); assertTableEquals(TableTools.newTable(intCol("LS", 1), intCol("LS2", 101), intCol("RS", 60)), joined); @@ -553,11 +561,11 @@ public void testZeroKeyTransitions() { listener.reset(); // empty out right in preparation for next tests, make something from left we can remove - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(right, i(0)); - final TableUpdateImpl update = + final TableUpdateImpl update1 = new TableUpdateImpl(i(), i(0), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY); - right.notifyListeners(update); + right.notifyListeners(update1); addToTable(left, i(11, 20), intCol("LS", 2, 4), intCol("LS2", 102, 104)); left.notifyListeners(i(11, 20), i(), i()); }); @@ -565,11 +573,11 @@ public void testZeroKeyTransitions() { intCol("RS", NULL_INT, NULL_INT, NULL_INT)), joined); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(11), 
intCol("LS", 3), intCol("LS2", 102)); - final TableUpdateImpl updateLeft = new TableUpdateImpl(i(), i(), i(11), + final TableUpdateImpl updateLeft2 = new TableUpdateImpl(i(), i(), i(11), RowSetShiftData.EMPTY, left.newModifiedColumnSet("LS")); - left.notifyListeners(updateLeft); + left.notifyListeners(updateLeft2); }); assertTableEquals(TableTools.newTable(intCol("LS", 1, 3, 4), intCol("LS2", 101, 102, 104), intCol("RS", NULL_INT, NULL_INT, NULL_INT)), joined); @@ -581,11 +589,11 @@ public void testZeroKeyTransitions() { listener.reset(); // right empty, remove from left - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(left, i(20)); - final TableUpdateImpl updateLeft = + final TableUpdateImpl updateLeft1 = new TableUpdateImpl(i(), i(20), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY); - left.notifyListeners(updateLeft); + left.notifyListeners(updateLeft1); }); assertTableEquals( TableTools.newTable(intCol("LS", 1, 3), intCol("LS2", 101, 102), intCol("RS", NULL_INT, NULL_INT)), @@ -597,7 +605,8 @@ public void testZeroKeyTransitions() { assertEquals(i(40), listener.update.removed()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // right transitions to non-empty, left has a remove + updateGraph.runWithinUnitTestCycle(() -> { // right transitions to non-empty, left has a remove addToTable(right, i(0), intCol("RS", 70)); final TableUpdateImpl update = @@ -681,7 +690,8 @@ private void testLeftOuterJoinSimpleIncremental(JoinControl joinControl) { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(right, i(0, 1)); right.notifyListeners(i(), i(0, 1), i()); }); @@ -696,7 +706,7 @@ private void 
testLeftOuterJoinSimpleIncremental(JoinControl joinControl) { assertTableEquals(TableTools.newTable(intCol("LK", 1, 2, 1, 3, 2, 1), intCol("LS", 1, 2, 3, 4, 5, 6), intCol("RS", NULL_INT, 30, NULL_INT, NULL_INT, 30, NULL_INT)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(7), intCol("RK", 3), intCol("RS", 40)); right.notifyListeners(i(7), i(), i()); }); @@ -708,7 +718,7 @@ private void testLeftOuterJoinSimpleIncremental(JoinControl joinControl) { assertTableEquals(TableTools.newTable(intCol("LK", 1, 2, 1, 3, 2, 1), intCol("LS", 1, 2, 3, 4, 5, 6), intCol("RS", NULL_INT, 30, NULL_INT, 40, 30, NULL_INT)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(8, 9), intCol("RK", 3, 3), intCol("RS", 50, 60)); right.notifyListeners(i(8, 9), i(), i()); addToTable(left, i(7), intCol("LK", 1), intCol("LS", 8)); @@ -720,7 +730,7 @@ private void testLeftOuterJoinSimpleIncremental(JoinControl joinControl) { intCol("RS", NULL_INT, 30, NULL_INT, 40, 50, 60, 30, NULL_INT, NULL_INT)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(0), intCol("RK", 4), intCol("RS", 70)); final TableUpdateImpl update = new TableUpdateImpl(i(0), i(), i(), RowSetShiftData.EMPTY, right.newModifiedColumnSet("RS")); @@ -736,7 +746,7 @@ private void testLeftOuterJoinSimpleIncremental(JoinControl joinControl) { joined); System.out.println("Activate K=4"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(0, 11), intCol("LK", 4, 3), intCol("LS", 1, 10)); removeRows(left, i(1)); left.notifyListeners(new TableUpdateImpl(i(11), i(1), i(0), RowSetShiftData.EMPTY, @@ -750,19 +760,19 @@ private void testLeftOuterJoinSimpleIncremental(JoinControl joinControl) { intCol("LS", 1, 3, 4, 
4, 4, 5, 6, 8, 10, 10, 10), intCol("RS", 70, NULL_INT, 40, 50, 60, 30, NULL_INT, NULL_INT, 40, 50, 60)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(1), intCol("LK", 4), intCol("LS", 1)); removeRows(left, i(0)); - final RowSetShiftData.Builder shiftBuilder = new RowSetShiftData.Builder(); - shiftBuilder.shiftRange(0, 0, 1); + final RowSetShiftData.Builder shiftBuilder1 = new RowSetShiftData.Builder(); + shiftBuilder1.shiftRange(0, 0, 1); left.notifyListeners( - new TableUpdateImpl(i(), i(), i(), shiftBuilder.build(), ModifiedColumnSet.EMPTY)); + new TableUpdateImpl(i(), i(), i(), shiftBuilder1.build(), ModifiedColumnSet.EMPTY)); }); assertTableEquals(TableTools.newTable(intCol("LK", 4, 1, 3, 3, 3, 2, 1, 1, 3, 3, 3), intCol("LS", 1, 3, 4, 4, 4, 5, 6, 8, 10, 10, 10), intCol("RS", 70, NULL_INT, 40, 50, 60, 30, NULL_INT, NULL_INT, 40, 50, 60)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(16, 17), intCol("RK", 3, 3), intCol("RS", 50, 60)); removeRows(right, i(8, 9)); @@ -790,7 +800,8 @@ public void testLeftOuterJoinShiftAndTransitionToFull() { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(1), intCol("RK", 1), intCol("RS", 20)); right.notifyListeners(i(1), i(), i()); @@ -834,7 +845,8 @@ public void testLeftOuterJoinShiftAndRightBitsIncrease() { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(1, 2, 3), intCol("RK", 1, 2, 2), intCol("RS", 20, 30, 40)); right.notifyListeners(i(1, 2, 3), i(), i()); @@ -879,7 +891,8 @@ public void testLeftOuterJoinShiftAndTransitionToEmpty() { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(right, i(0)); right.notifyListeners(i(), i(0), i()); @@ -930,7 +943,8 @@ private void testLeftOuterJoinSimpleLeftIncremental(JoinControl joinControl) { final SimpleListener listener = new SimpleListener(joined); (joined).addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(7), intCol("LK", 1), intCol("LS", 8)); left.notifyListeners(i(7), i(), i()); }); @@ -939,7 +953,7 @@ private void testLeftOuterJoinSimpleLeftIncremental(JoinControl joinControl) { intCol("LS", 1, 1, 2, 3, 3, 4, 5, 6, 6, 8, 8), intCol("RS", 10, 20, 30, 10, 20, NULL_INT, 30, 10, 20, 10, 20)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(0, 11), intCol("LK", 4, 3), intCol("LS", 1, 10)); removeRows(left, i(1)); left.notifyListeners(new TableUpdateImpl(i(11), i(1), i(0), RowSetShiftData.EMPTY, @@ -971,7 +985,8 @@ public void testLeftTickingRightStaticRemoveWithoutRightState() { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(left, i(20), intCol("LK", 1), intCol("LS", 3)); removeRows(left, i(10)); left.notifyListeners(i(), i(10), i(20)); @@ -1013,7 +1028,8 @@ private void testLeftOuterJoinSimpleRightIncremental(JoinControl joinControl) { final SimpleListener listener = new SimpleListener(joined); joined.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(right, i(0, 1)); right.notifyListeners(i(), i(0, 1), i()); }); @@ -1028,7 +1044,7 @@ private void testLeftOuterJoinSimpleRightIncremental(JoinControl joinControl) { assertTableEquals(TableTools.newTable(intCol("LK", 1, 2, 1, 3, 2, 1), intCol("LS", 1, 2, 3, 4, 5, 6), intCol("RS", NULL_INT, 30, NULL_INT, NULL_INT, 30, NULL_INT)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(7), intCol("RK", 3), intCol("RS", 40)); right.notifyListeners(i(7), i(), i()); }); @@ -1045,7 +1061,7 @@ private void testLeftOuterJoinSimpleRightIncremental(JoinControl joinControl) { TableTools.showWithRowSet(joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(8, 9), intCol("RK", 3, 3), intCol("RS", 50, 60)); right.notifyListeners(i(8, 9), i(), i()); }); @@ -1069,7 +1085,7 @@ private void testLeftOuterJoinSimpleRightIncremental(JoinControl joinControl) { } listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(0), intCol("RK", 4), intCol("RS", 70)); final TableUpdateImpl update = new TableUpdateImpl(i(0), i(), i(), RowSetShiftData.EMPTY, right.newModifiedColumnSet("RS")); @@ -1079,7 +1095,7 @@ private void testLeftOuterJoinSimpleRightIncremental(JoinControl 
joinControl) { intCol("LS", 1, 2, 3, 4, 4, 4, 5, 6), intCol("RS", NULL_INT, 30, NULL_INT, 40, 50, 60, 30, NULL_INT)), joined); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(16, 17), intCol("RK", 3, 3), intCol("RS", 50, 60)); removeRows(right, i(8, 9)); final RowSetShiftData.Builder shiftBuilder = new RowSetShiftData.Builder(); @@ -1294,14 +1310,15 @@ public void testStaticVsNaturalJoin2() { assertTableEquals(z2, z); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { xqt.getRowSet().writableCast().insertRange(size, size * 2); xqt.notifyListeners(RowSetFactory.fromRange(size, size * 2), i(), i()); }); assertTableEquals(z2, z); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { yqt.getRowSet().writableCast().insertRange(size, size * 2); yqt.notifyListeners(RowSetFactory.fromRange(size, size * 2), i(), i()); }); @@ -1355,9 +1372,10 @@ public void testLeftIncrementalOverflowRemove() { assertTableEquals(expected, result); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet toRemove = RowSetFactory.fromKeys( - LongStream.range(0, size / 4).map(vv -> vv * 4 + 3).toArray()); + LongStream.range(0, size / 4).map(vv1 -> vv1 * 4 + 3).toArray()); removeRows(leftTable, toRemove); leftTable.notifyListeners( new TableUpdateImpl(i(), toRemove, i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY)); @@ -1409,12 +1427,13 @@ public void testRightIncrementalOverflowModifyKeys() { assertTableEquals(expected, result); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = 
ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet toModify = - RowSetFactory.fromKeys(LongStream.range(0, size / 4).map(vv -> vv * 4 + 3).toArray()); + RowSetFactory.fromKeys(LongStream.range(0, size / 4).map(vv1 -> vv1 * 4 + 3).toArray()); addToTable(rightTable, toModify, - intCol("RK", IntStream.range(0, size / 4).map(vv -> vv + (size * 4)).toArray()), - intCol("RS", IntStream.range(0, size / 4).map(vv -> 2 * sentinelOffset + vv).toArray())); + intCol("RK", IntStream.range(0, size / 4).map(vv1 -> vv1 + (size * 4)).toArray()), + intCol("RS", IntStream.range(0, size / 4).map(vv1 -> 2 * sentinelOffset + vv1).toArray())); rightTable.notifyListeners(new TableUpdateImpl(i(), i(), toModify, RowSetShiftData.EMPTY, rightTable.newModifiedColumnSet("RK"))); }); @@ -1511,10 +1530,11 @@ protected void testIncrementalWithKeyColumns(final String ctxt, final int initia TableTools.showWithRowSet(rightStatic); } + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { System.out.println("Seed = " + seed + ", size = " + initialSize + ", step = " + numSteps.intValue()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, @@ -1583,10 +1603,11 @@ public double getTargetLoadFactor() { TableTools.showWithRowSet(rightTicking); } + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { final long rightOffset = numSteps.getValue(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftTicking, 
i(numSteps.getValue()), longCol("intCol", numSteps.getValue())); TableUpdateImpl up = new TableUpdateImpl(); up.shifted = RowSetShiftData.EMPTY; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableNaturalJoinTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableNaturalJoinTest.java index c57fc05097e..20b8b99a7fc 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableNaturalJoinTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableNaturalJoinTest.java @@ -6,6 +6,7 @@ import io.deephaven.base.FileUtils; import io.deephaven.chunk.ObjectChunk; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; @@ -22,7 +23,6 @@ import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.*; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.parquet.table.ParquetTools; import io.deephaven.test.types.OutOfBandTest; @@ -94,7 +94,10 @@ public Table e() { fillRehashKeys(offset, leftJoinKey, leftSentinel, rightJoinKey, rightSentinel); final int foffset = offset; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // make something that exists go away + // make something that did not exist come back + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet addRowSet = RowSetFactory.fromRange(foffset, foffset + leftJoinKey.length - 1); addToTable(leftTable, addRowSet, stringCol("JoinKey", leftJoinKey), intCol("LeftSentinel", leftSentinel)); @@ -368,12 +371,13 @@ private void 
testNaturalJoinMixedGroupingLeftStatic(int leftSize, int rightSize, final Table resultFlat = leftFlat.naturalJoin(rightTable, "I1", "LC1=C1,LC2=C2"); assertTableEquals(noGroupingResult, resultFlat); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < steps; ++step) { if (RefreshingTableTestCase.printTableUpdates) { System.out.println("Step = " + step); } - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, rightSize, random, rightTable, rightColumnInfos); }); @@ -610,7 +614,8 @@ private void testNaturalJoinDuplicateRightsRefreshingRight(Class clazz, F cj2.addUpdateListener(listener); try (final ErrorExpectation ignored = new ErrorExpectation()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(right2, i(3), col("Symbol", a), intCol("RightSentinel", 10)); right2.notifyListeners(i(3), i(), i()); }); @@ -653,7 +658,8 @@ private void testNaturalJoinDuplicateRightsRefreshingBoth(Class clazz, Fu cj2.addUpdateListener(listener); try (final ErrorExpectation ignored = new ErrorExpectation()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(right2, i(3), col("Symbol", a), intCol("RightSentinel", 10)); right2.notifyListeners(i(3), i(), i()); }); @@ -714,7 +720,8 @@ public void testNaturalJoinZeroKeys() { TableTools.showWithRowSet(cj); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(c1, i(1), intCol("Right", 4)); c1.notifyListeners(i(1), i(), i()); }); @@ -724,7 +731,7 @@ public void testNaturalJoinZeroKeys() { final Table fourRightResult = newTable(intCol("Left", 1, 2, 3), intCol("Right", 4, 4, 4)); assertTableEquals(fourRightResult, cj); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(c1, i(1)); c1.notifyListeners(i(), i(1), i()); }); @@ -733,7 +740,7 @@ public void testNaturalJoinZeroKeys() { assertTableEquals(emptyRightResult, cj); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(c0, i(6), intCol("Left", 6)); addToTable(c1, i(2), intCol("Right", 5)); c0.notifyListeners(i(6), i(), i()); @@ -756,7 +763,8 @@ public void testNaturalJoinZeroKeysStaticRight() { final Table cj1 = c0.naturalJoin(c1, ""); assertTableEquals(newTable(intCol("Left", 1, 2, 3), intCol("Right", NULL_INT, NULL_INT, NULL_INT)), cj1); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(c0, i(6), intCol("Left", 6)); c0.notifyListeners(i(6), i(), i()); }); @@ -767,7 +775,7 @@ public void testNaturalJoinZeroKeysStaticRight() { final Table cj2 = c0.naturalJoin(c2, ""); assertTableEquals(newTable(intCol("Left", 1, 2, 3, 6), intCol("Right", 4, 4, 4, 4)), cj2); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(c0, i(7), intCol("Left", 7)); c0.notifyListeners(i(7), i(), i()); }); @@ -791,7 +799,8 @@ public void testNaturalJoinZeroKeysStaticLeft() { TableTools.showWithRowSet(cj); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(c1, i(1), intCol("Right", 4)); c1.notifyListeners(i(1), i(), i()); }); @@ -801,7 +810,7 @@ public void testNaturalJoinZeroKeysStaticLeft() { final Table fourRightResult = newTable(intCol("Left", 1, 2, 3), intCol("Right", 4, 4, 4)); assertTableEquals(fourRightResult, cj); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(c1, i(1)); c1.notifyListeners(i(), i(1), i()); }); @@ -976,7 +985,8 @@ public void testNaturalJoinInactive() { assertEquals(3, DataAccessHelpers.getColumn(cj, "Y").get(0)); assertNull(DataAccessHelpers.getColumn(cj, "Y").get(1)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(c1, i(2)); c1.notifyListeners(i(), i(2), i()); }); @@ -988,7 +998,7 @@ public void testNaturalJoinInactive() { assertEquals(3, DataAccessHelpers.getColumn(cj, "Y").get(0)); assertNull(DataAccessHelpers.getColumn(cj, "Y").get(1)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(c0, i(2), col("USym0", "B"), col("X", 6)); c0.notifyListeners(i(2), i(), i()); }); @@ -1024,7 +1034,8 @@ public Table e() { } } }; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftQueryTable, i(3, 9), col("Sym", "aa", "aa"), col("ByteCol", (byte) 20, (byte) 10), col("DoubleCol", 2.1, 2.2)); System.out.println("Left Table Updated:"); @@ -1033,8 +1044,7 @@ public Table e() { }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> leftQueryTable.notifyListeners(i(), i(), i(1, 2, 4, 6))); + updateGraph.runWithinUnitTestCycle(() -> 
leftQueryTable.notifyListeners(i(), i(), i(1, 2, 4, 6))); TstUtils.validate(en); } @@ -1126,7 +1136,8 @@ public Table e() { System.out.println("Right Table 1:"); TableTools.showWithRowSet(rightQueryTable1); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftQueryTable, i(3, 9), col("Sym", "aa", "aa"), col("intCol", 20, 10), col("doubleCol", 2.1, 2.2)); System.out.println("Left Table Updated:"); @@ -1135,20 +1146,20 @@ public Table e() { }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftQueryTable, i(1, 9), col("Sym", "bc", "aa"), col("intCol", 30, 11), col("doubleCol", 2.1, 2.2)); leftQueryTable.notifyListeners(i(), i(), i(1, 9)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable1, i(3, 4), col("Sym", "ab", "ac"), col("xCol", 55, 33), col("yCol", 6.6, 7.7)); rightQueryTable1.notifyListeners(i(4), i(), i(3)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { show(rightQueryTable2); addToTable(rightQueryTable2, i(20, 40), col("Sym", "aa", "bc"), col("xCol", 30, 50), @@ -1159,25 +1170,25 @@ public Table e() { TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable1, i(4, 6), col("Sym", "bc", "aa"), col("xCol", 66, 44), col("yCol", 7.6, 6.7)); rightQueryTable1.notifyListeners(i(), i(), i(4, 6)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable1, i(4, 6), col("Sym", "bc", "aa"), col("xCol", 66, 44), 
col("yCol", 7.7, 6.8)); rightQueryTable1.notifyListeners(i(), i(), i(4, 6)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable1, i(4, 31), col("Sym", "aq", "bc"), col("xCol", 66, 44), col("yCol", 7.5, 6.9)); rightQueryTable1.notifyListeners(i(31), i(), i(4)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(20, 30), col("Sym", "aa", "aa"), col("xCol", 20, 30), col("yCol", 3.1, 5.1)); @@ -1186,14 +1197,14 @@ public Table e() { TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(rightQueryTable1, i(4)); rightQueryTable1.notifyListeners(i(), i(4), i()); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(40), col("Sym", "bc"), col("xCol", 20), col("yCol", 3.2)); @@ -1202,7 +1213,7 @@ public Table e() { }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(leftQueryTable, i(9)); dumpComplete(leftQueryTable, "Sym", "intCol"); leftQueryTable.notifyListeners(i(), i(9), i()); @@ -1271,21 +1282,22 @@ public Table e() { } } }; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftQueryTable, i(3, 9), col("Sym", "aa", "aa"), col("intCol", 20, 10), col("doubleCol", 2.1, 2.2)); leftQueryTable.notifyListeners(i(3, 9), i(), i()); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftQueryTable, 
i(1, 9), col("Sym", "bc", "aa"), col("intCol", 30, 11), col("doubleCol", 2.1, 2.2)); leftQueryTable.notifyListeners(i(), i(), i(1, 9)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { show(rightQueryTable2); addToTable(rightQueryTable2, i(20, 40), col("Sym", "aa", "bc"), col("xCol", 30, 50), @@ -1295,7 +1307,7 @@ public Table e() { }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(20, 30), col("Sym", "aa", "aa"), col("xCol", 20, 30), col("yCol", 3.1, 5.1)); @@ -1303,7 +1315,7 @@ public Table e() { }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(40), col("Sym", "bc"), col("xCol", 20), col("yCol", 3.2)); @@ -1312,7 +1324,7 @@ public Table e() { }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(leftQueryTable, i(9)); leftQueryTable.notifyListeners(i(), i(9), i()); }); @@ -1379,42 +1391,43 @@ public Table e() { TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftTable, i(0, 1, 2), col("Sym", "c", "a", "b"), col("Size", 1, 2, 3)); leftTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightTable, i(0, 1, 2), col("Sym", "b", "c", "a"), col("Qty", 10, 20, 30)); rightTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftTable, i(0, 1, 2), col("Sym", "a", "b", "c"), col("Size", 3, 1, 2)); leftTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightTable, i(0, 1, 2), col("Sym", "a", "b", "c"), col("Qty", 30, 10, 20)); rightTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(leftTable, i(3, 4), col("Sym", "d", "e"), col("Size", -1, 100)); leftTable.notifyListeners(i(3, 4), i(), i()); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(rightTable, i(3, 4), col("Sym", "e", "d"), col("Qty", -10, 50)); rightTable.notifyListeners(i(3, 4), i(), i()); @@ -1546,12 +1559,12 @@ public void testDHC3202_v1() { // noinspection unused final Table joinTable = leftTable.naturalJoin(rightTable, "idx=idx", "RightValue"); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 0; ii < 10; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( - () -> { - generateAppends(10_000, random, leftTable, leftColumnInfo); - generateAppends(10_000, random, rightTable, rightColumnInfo); - }); + updateGraph.runWithinUnitTestCycle(() -> { + generateAppends(10_000, random, leftTable, leftColumnInfo); + generateAppends(10_000, random, rightTable, rightColumnInfo); + }); } } @@ -1575,12 +1588,12 @@ public void testDHC3202_v2() { // noinspection unused final Table joinTable = leftTable.naturalJoin(rightTable, "idx=idx", "RightValue"); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 0; ii < 10; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( - () -> { - 
generateAppends(100_000, random, leftTable, leftColumnInfo); - generateAppends(100_000, random, rightTable, rightColumnInfo); - }); + updateGraph.runWithinUnitTestCycle(() -> { + generateAppends(100_000, random, leftTable, leftColumnInfo); + generateAppends(100_000, random, rightTable, rightColumnInfo); + }); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSelectUpdateTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSelectUpdateTest.java index 7a0b9e01ac9..7434df60580 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSelectUpdateTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSelectUpdateTest.java @@ -7,42 +7,43 @@ import io.deephaven.base.testing.BaseArrayTestCase; import io.deephaven.configuration.Configuration; import io.deephaven.engine.context.ExecutionContext; -import io.deephaven.engine.rowset.WritableRowSet; +import io.deephaven.engine.context.QueryScope; +import io.deephaven.engine.liveness.LivenessScope; +import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.rowset.WritableRowSet; +import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; +import io.deephaven.engine.table.impl.select.DhFormulaColumn; +import io.deephaven.engine.table.impl.select.FormulaCompilationException; import io.deephaven.engine.table.impl.sources.InMemoryColumnSource; +import io.deephaven.engine.table.impl.sources.LongSparseArraySource; import io.deephaven.engine.table.impl.sources.RedirectedColumnSource; import io.deephaven.engine.table.impl.sources.SparseArrayColumnSource; +import io.deephaven.engine.table.impl.util.RuntimeMemory; import io.deephaven.engine.testutil.*; +import 
io.deephaven.engine.testutil.QueryTableTestBase.ListenerWithGlobals; +import io.deephaven.engine.testutil.QueryTableTestBase.TableComparator; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.util.TableTools; -import io.deephaven.engine.liveness.LivenessScope; -import io.deephaven.engine.liveness.LivenessScopeStack; -import io.deephaven.engine.testutil.QueryTableTestBase.ListenerWithGlobals; -import io.deephaven.engine.testutil.QueryTableTestBase.TableComparator; -import io.deephaven.engine.table.impl.select.DhFormulaColumn; -import io.deephaven.engine.table.impl.select.FormulaCompilationException; -import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.table.impl.sources.LongSparseArraySource; -import io.deephaven.engine.table.impl.util.*; -import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.util.SafeCloseable; import junit.framework.TestCase; import org.apache.commons.lang3.mutable.MutableInt; -import org.junit.*; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; import java.util.*; import java.util.function.Supplier; -import static io.deephaven.engine.util.TableTools.*; import static io.deephaven.engine.testutil.TstUtils.*; +import static io.deephaven.engine.util.TableTools.*; import static java.util.Collections.emptyList; /** @@ -99,7 +100,8 @@ public void doTestSelectAndUpdate() { final ShiftObliviousListener table2Listener = base.newListenerWithGlobals(table2); table2.addUpdateListener(table2Listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); 
+ updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), col("x", 4, 5), col("y", 'd', 'e')); table.notifyListeners(i(7, 9), i(), i()); }); @@ -116,7 +118,7 @@ public void doTestSelectAndUpdate() { TestCase.assertEquals(i(), base.removed); TestCase.assertEquals(i(), base.modified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), col("x", 3, 10), col("y", 'e', 'd')); table.notifyListeners(i(), i(), i(7, 9)); }); @@ -130,7 +132,7 @@ public void doTestSelectAndUpdate() { TestCase.assertEquals(i(), base.removed); TestCase.assertEquals(i(7, 9), base.modified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(table, i(2, 6, 7)); table.notifyListeners(i(), i(2, 6, 7), i()); }); @@ -144,7 +146,7 @@ public void doTestSelectAndUpdate() { TestCase.assertEquals(i(2, 6, 7), base.removed); TestCase.assertEquals(i(), base.modified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(table, i(9)); addToTable(table, i(2, 4, 6), col("x", 1, 22, 3), col("y", 'a', 'x', 'c')); table.notifyListeners(i(2, 6), i(9), i(4)); @@ -176,7 +178,7 @@ public void doTestSelectAndUpdate() { final ShiftObliviousListener table7Listener2 = base.newListenerWithGlobals(table7); table7.addUpdateListener(table7Listener2); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table6, i(7, 9), col("x", 4, 5), col("y", 'd', 'e')); table6.notifyListeners(i(7, 9), i(), i()); }); @@ -184,7 +186,7 @@ public void doTestSelectAndUpdate() { assertTableEquals(TableTools.newTable(intCol("x", 2, 3, 4, 5, 6), charCol("y", 'a', 'b', 'c', 'd', 'e'), intCol("z", 1, 2, 3, 4, 5), intCol("t", -1, 0, 1, 2, 3)), table7); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(table6, i(7, 9), col("x", 3, 10), col("y", 'e', 'd')); table6.notifyListeners(i(), i(), i(7, 9)); }); @@ -195,7 +197,7 @@ public void doTestSelectAndUpdate() { TestCase.assertEquals(i(7, 9), base.modified); TestCase.assertEquals(i(), base.removed); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(table6, i(2, 6, 7)); table6.notifyListeners(i(), i(2, 6, 7), i()); }); @@ -207,7 +209,7 @@ public void doTestSelectAndUpdate() { TestCase.assertEquals(i(2, 6, 7), base.removed); TestCase.assertEquals(i(), base.modified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(table6, i(9)); addToTable(table6, i(2, 4, 6), col("x", 1, 22, 3), col("y", 'a', 'x', 'c')); table6.notifyListeners(i(2, 6), i(9), i(4)); @@ -498,10 +500,11 @@ private void doTestSparseRedirectedUpdate() { final long startUsedMemory = sample.totalMemory - sample.freeMemory; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < 10000; ++step) { final int fstep = step; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final long keyToAdd = fstep + 1; final RowSet addedRowSet = i(keyToAdd); final RowSet removedRowSet = (fstep % 2 == 0) ? 
i(fstep) : i(); @@ -673,7 +676,8 @@ private void testUpdateIncremental(final int seed, MutableInt numSteps) { @Test public void testUpdateIncrementalRandomized() { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 2, 1); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 2, 1); final boolean old = QueryTable.FORCE_PARALLEL_SELECT_AND_UPDATE; try { QueryTable.FORCE_PARALLEL_SELECT_AND_UPDATE = true; @@ -695,7 +699,8 @@ public void testUpdateIncrementalRandomized() { @Test public void testUpdateIncrementalRandomizedLarge() { // this test has large enough size that we will have individual column updates spread across threads - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 2, 1); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 2, 1); final boolean old = QueryTable.FORCE_PARALLEL_SELECT_AND_UPDATE; try { QueryTable.FORCE_PARALLEL_SELECT_AND_UPDATE = true; @@ -785,7 +790,8 @@ public void testUpdateEmptyTable() { TestCase.assertEquals(0, table.size()); TestCase.assertEquals(0, table2.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { show(table2); addToTable(table, i(7, 9)); table.notifyListeners(i(7, 9), i(), i()); @@ -804,7 +810,7 @@ public void testUpdateEmptyTable() { TestCase.assertEquals(base.removed, i()); TestCase.assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { }); } @@ -818,7 +824,8 @@ public void testUpdateIndex() { TestCase.assertEquals(0, table.size()); TestCase.assertEquals(0, table2.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final 
ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { show(table2); addToTable(table, i(7, 9)); table.notifyListeners(i(7, 9), i(), i()); @@ -836,7 +843,7 @@ public void testUpdateIndex() { TestCase.assertEquals(base.removed, i()); TestCase.assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i(9))); + updateGraph.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i(9))); TestCase.assertEquals(2, table.size()); TestCase.assertEquals(2, table2.size()); @@ -861,7 +868,8 @@ public void testUpdateArrayColumns() { TestCase.assertEquals(0, table.size()); TestCase.assertEquals(0, table2.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { show(table2); addToTable(table, i(7, 9)); table.notifyListeners(i(7, 9), i(), i()); @@ -880,7 +888,7 @@ public void testUpdateArrayColumns() { TestCase.assertEquals(i(), base.removed); TestCase.assertEquals(i(), base.modified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i(9))); + updateGraph.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i(9))); TestCase.assertEquals(2, table.size()); TestCase.assertEquals(2, table2.size()); @@ -961,7 +969,8 @@ public void testSelectReuse() { assertTableEquals(prevTable(table), prevTable(selected)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2), longCol("Value", 3)); table.notifyListeners(i(2), i(), i()); }); @@ -973,7 +982,7 @@ public void testSelectReuse() { TableTools.show(selected); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() 
-> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(1L << 20 + 2), longCol("Value", 4)); table.notifyListeners(i(1L << 20 + 2), i(), i()); }); @@ -1013,7 +1022,8 @@ public void testEagerParamBinding() { final QueryTable table = TstUtils.testRefreshingTable(i().toTracking()); final QueryTable table2 = (QueryTable) table.update("A = i * scale"); QueryScope.addParam("scale", "Multiplying i by this string will not compile"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { show(table); addToTable(table, i(10, 20)); table.notifyListeners(i(10, 20), i(), i()); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSliceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSliceTest.java index d49fe15b271..90d0609b11b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSliceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSliceTest.java @@ -3,18 +3,15 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.primitive.function.CharConsumer; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableUpdate; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.testutil.QueryTableTestBase; -import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.generator.SortedLongGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import 
io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -371,11 +368,12 @@ public void testTailWithGrowth() { for (int i = 0; i < steps; ++i) { final long ii = i; final long jj = j; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - RowSet added = RowSetFactory.fromRange(ii * jj, (ii + 1) * jj - 1); - upTable.getRowSet().writableCast().insert(added); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + RowSet added1 = RowSetFactory.fromRange(ii * jj, (ii + 1) * jj - 1); + upTable.getRowSet().writableCast().insert(added1); TableUpdate update = - new TableUpdateImpl(added, RowSetFactory.empty(), + new TableUpdateImpl(added1, RowSetFactory.empty(), RowSetFactory.empty(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY); upTable.notifyListeners(update); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSortTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSortTest.java index 318afc04b11..b577063c09b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSortTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableSortTest.java @@ -7,6 +7,7 @@ import io.deephaven.api.SortColumn; import io.deephaven.base.FileUtils; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.TrackingWritableRowSet; @@ -16,7 +17,6 @@ import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.*; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.context.QueryScope; import io.deephaven.parquet.table.ParquetTools; import io.deephaven.engine.util.TableTools; @@ -183,7 +183,8 @@ public 
void testSort2() { assertTableEquals(testTable(col("A", 1, 2, 3), col("B", "a", "b", "c")), sorted); assertTrue(SortedColumnsAttribute.isSortedBy(sorted, "A", SortingOrder.Ascending)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(20), col("A", 1), col("B", "A")); table.notifyListeners(i(), i(), i(20)); }); @@ -191,7 +192,7 @@ public void testSort2() { assertTableEquals(testTable(col("A", 1, 2, 3), col("B", "A", "b", "c")), sorted); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(20), col("A", 1), col("B", "A2")); addToTable(table, i(25), col("A", 1), col("B", "A2'")); table.notifyListeners(i(25), i(), i(20)); @@ -200,7 +201,7 @@ public void testSort2() { assertTableEquals(testTable(col("A", 1, 1, 2, 3), col("B", "A2", "A2'", "b", "c")), sorted); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(20, 25), col("A", 1, 3), col("B", "A3", "C2")); table.notifyListeners(i(), i(), i(20, 25)); }); @@ -494,9 +495,10 @@ private void performTestsInDirection(String what, long addedRatioLimit, long rem long adds = 0, removes = 0, modifies = 0, shifts = 0, modifiedColumns = 0; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 1; ii < values.length; ++ii) { final int fii = ii; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(fii), col("intCol", values[fii])); queryTable.notifyListeners(i(fii), i(), i()); }); @@ -551,19 +553,20 @@ public void testSortIncremental() { "Double Sort") }; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = 
ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3, 9), col("Sym", "aa", "aa"), col("intCol", 20, 10), col("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(3, 9), i(), i()); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(1, 9), col("Sym", "bc", "aa"), col("intCol", 30, 11), col("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(), i(), i(1, 9)); }); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(queryTable, i(9)); queryTable.notifyListeners(i(), i(9), i()); }); @@ -612,10 +615,11 @@ public void testGrowingMergeReinterpret() { final Table viewed = table.update("Timestamp='2019-04-11T09:30 NY' + (ii * 60L * 1000000000L)"); final Table sorted = TableTools.merge(viewed, viewed).sortDescending("Timestamp"); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 2; ii < 10000; ++ii) { // Use large enough indices that we blow beyond merge's initially reserved 64k key-space. 
final int fii = 8059 * ii; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(fii), col("Sentinel", fii)); table.notifyListeners(i(fii), i(), i()); }); @@ -681,8 +685,9 @@ private void doReinterpretTestIncremental(Table table) { final Table boolSorted = filtered.sort("Truthiness"); final Table boolInverseSorted = boolSorted.sortDescending("Timestamp"); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (filtered.size() < merged.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(filter::run); + updateGraph.runWithinUnitTestCycle(filter::run); } final TIntList sentinels = new TIntArrayList(); @@ -733,7 +738,8 @@ public void testDh11506() { final Table ss = ms.sortDescending("Symbol", "X"); TableTools.showWithRowSet(s); assertTableEquals(ss, s); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(y, i(10), col("Symbol", "B"), col("X", "5"), intCol("Y", 109)); y.notifyListeners(i(10), i(), i()); }); @@ -783,10 +789,11 @@ private void doSymbolTableIncrementalTest(Table table) { final Table symbolSorted = refreshing.sort("Symbol"); showWithRowSet(symbolSorted); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet added = table.getRowSet().subSetByPositionRange(4, 10); - rowSet.insert(added); - refreshing.notifyListeners(added, i(), i()); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet added1 = table.getRowSet().subSetByPositionRange(4, 10); + rowSet.insert(added1); + refreshing.notifyListeners(added1, i(), i()); }); showWithRowSet(symbolSorted); diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTest.java index 2d64c2f6563..e4d7d3fd11e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTest.java @@ -11,9 +11,12 @@ import io.deephaven.api.snapshot.SnapshotWhenOptions.Flag; import io.deephaven.base.FileUtils; import io.deephaven.base.Pair; +import io.deephaven.base.log.LogOutput; import io.deephaven.base.verify.AssertionFailure; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; +import io.deephaven.engine.exceptions.UpdateGraphConflictException; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.liveness.SingletonLivenessManager; import io.deephaven.engine.rowset.*; @@ -33,17 +36,21 @@ import io.deephaven.engine.testutil.generator.*; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.UpdateGraphLock; import io.deephaven.engine.util.TableTools; +import io.deephaven.io.log.LogEntry; import io.deephaven.parquet.table.ParquetTools; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; import io.deephaven.util.QueryConstants; import io.deephaven.util.SafeCloseable; +import io.deephaven.util.locks.AwareFunctionalLock; import io.deephaven.vector.*; import junit.framework.TestCase; import org.apache.commons.lang3.mutable.MutableObject; import org.apache.groovy.util.Maps; +import org.jetbrains.annotations.MustBeInvokedByOverriders; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; 
import org.junit.Assert; @@ -68,10 +75,10 @@ /** * Test of QueryTable functionality. - * + *

* This test used to be a catch all, but at over 7,000 lines became unwieldy. It is still somewhat of a catch-all, but * some specific classes of tests have been broken out. - * + *

* See also {@link QueryTableAggregationTest}, {@link QueryTableJoinTest}, {@link QueryTableSelectUpdateTest}, * {@link QueryTableFlattenTest}, and {@link QueryTableSortTest}. */ @@ -292,7 +299,8 @@ public void testView() { final ShiftObliviousListener table2Listener = newListenerWithGlobals(table2); table2.addUpdateListener(table2Listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table1, i(7, 9), col("x", 4, 5), col("y", 'd', 'e')); table1.notifyListeners(i(7, 9), i(), i()); }); @@ -303,7 +311,7 @@ public void testView() { assertEquals(modified, i()); assertEquals(removed, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table1, i(7, 9), col("x", 3, 10), col("y", 'e', 'd')); table1.notifyListeners(i(), i(), i(7, 9)); }); @@ -314,7 +322,7 @@ public void testView() { assertEquals(modified, i(7, 9)); assertEquals(removed, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table1, i(2, 6, 7)); table1.notifyListeners(i(), i(2, 6, 7), i()); }); @@ -326,7 +334,7 @@ public void testView() { assertEquals(removed, i(2, 6, 7)); assertEquals(modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table1, i(9)); addToTable(table1, i(2, 4, 6), col("x", 1, 22, 3), col("y", 'a', 'x', 'c')); table1.notifyListeners(i(2, 6), i(9), i(4)); @@ -345,7 +353,7 @@ public void testView() { final ShiftObliviousListener table4Listener = newListenerWithGlobals(table4); table4.addUpdateListener(table4Listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table3, i(7, 9), col("x", 4, 5), col("y", 'd', 'e')); 
table3.notifyListeners(i(7, 9), i(), i()); }); @@ -358,7 +366,7 @@ public void testView() { assertEquals(removed, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table3, i(7, 9), col("x", 3, 10), col("y", 'e', 'd')); table3.notifyListeners(i(), i(), i(7, 9)); }); @@ -370,7 +378,7 @@ public void testView() { assertEquals(modified, i(7, 9)); assertEquals(removed, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table3, i(2, 6, 7)); table3.notifyListeners(i(), i(2, 6, 7), i()); }); @@ -383,7 +391,7 @@ public void testView() { assertEquals(modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table3, i(9)); addToTable(table3, i(2, 4, 6), col("x", 1, 22, 3), col("y", 'a', 'x', 'c')); table3.notifyListeners(i(2, 6), i(9), i(4)); @@ -960,7 +968,8 @@ public void testReverse() { checkReverse(table, reversed, "Ticker"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final ColumnHolder[] columnAdditions = new ColumnHolder[] {col("Ticker", "SPY", "VXX"), col("Timestamp", 60L, 70L)}; addToTable(table, i(2048, 2049), columnAdditions); @@ -974,7 +983,7 @@ public void testReverse() { assertEquals("TSLA", reversed.getColumnSource("Ticker").getPrev(reversed.getRowSet().copyPrev().get(0))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { }); assertEquals("VXX", reversed.getColumnSource("Ticker").getPrev(reversed.getRowSet().copyPrev().get(0))); @@ -1007,7 +1016,7 @@ public long getLong(long rowKey) { assertEquals(Integer.MAX_VALUE, (long) licsr.get(1)); assertEquals(0, (long) licsr.get(2)); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { bigTable.getRowSet().writableCast().insert(Long.MAX_VALUE); bigTable.notifyListeners(i(Long.MAX_VALUE), i(), i()); }); @@ -1030,9 +1039,10 @@ public void testReverse2() { checkReverse(table, reversed, "Timestamp"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final ColumnHolder[] columnAdditions = new ColumnHolder[] {col("Timestamp", 2048L, 2049L)}; - addToTable(table, i(2048, 2049), columnAdditions); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + final ColumnHolder[] columnAdditions1 = new ColumnHolder[] {col("Timestamp", 2048L, 2049L)}; + addToTable(table, i(2048, 2049), columnAdditions1); table.notifyListeners(i(2048, 2049), i(), i()); }); @@ -1040,14 +1050,14 @@ public void testReverse2() { checkReverse(table, reversed, "Timestamp"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(1, 2048, 2049)); table.notifyListeners(i(), i(1, 2048, 2049), i()); }); assertEquals(0, reversed.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final ColumnHolder[] columnAdditions = new ColumnHolder[] {col("Timestamp", 8192L)}; addToTable(table, i(8192L), columnAdditions); table.notifyListeners(i(8192L), i(), i()); @@ -1075,7 +1085,8 @@ public void testReverseClipping() { new io.deephaven.engine.table.impl.SimpleListener(reverseTable); reverseTable.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TableUpdateImpl downstream = new TableUpdateImpl(); downstream.added = i(); downstream.removed = i(); @@ -1101,7 +1112,8 @@ public void 
testReverseClippingDuringShift() { new io.deephaven.engine.table.impl.SimpleListener(reversedTable); reversedTable.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TableUpdateImpl downstream = new TableUpdateImpl(); downstream.added = i(); downstream.removed = i(); @@ -1164,7 +1176,8 @@ public void testReverseBlink() { final long nextSize = ReverseOperation.MINIMUM_PIVOT + 2; final int[] data = new int[Math.toIntExact(nextSize - 1)]; Arrays.fill(data, 200); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, RowSetFactory.fromRange(1, nextSize - 1), intCol("Sentinel", data)); final TableUpdateImpl downstream = new TableUpdateImpl(); downstream.added = RowSetFactory.flat(nextSize); @@ -1199,14 +1212,15 @@ public void testSnapshot() { final Table expect1 = newTable(col("A", 3, 1, 2), col("B", "c", "a", "b"), col("T", 2, 2, 2)); assertTableEquals(expect1, snapshot); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(base, i(20, 40), col("A", 30, 50), col("B", "aa", "bc")); base.notifyListeners(i(20, 40), i(), i()); }); show(snapshot, 50); assertTableEquals(expect1, snapshot); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger2, i(3), col("T", 5)); trigger2.notifyListeners(i(3), i(), i()); }); @@ -1215,7 +1229,7 @@ public void testSnapshot() { newTable(col("A", 3, 30, 1, 2, 50), col("B", "c", "aa", "a", "b", "bc"), col("T", 5, 5, 5, 5, 5)); assertTableEquals(expect2, 
snapshot); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(base, i(10, 20, 30)); addToTable(base, i(25), col("A", 11), col("B", "A")); base.notifyListeners(i(), i(10, 20, 30), i(25)); @@ -1223,7 +1237,7 @@ public void testSnapshot() { show(snapshot, 50); assertTableEquals(expect2, snapshot); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger2, i(4, 5), col("T", 7, 8)); trigger2.notifyListeners(i(4, 5), i(), i()); }); @@ -1246,13 +1260,14 @@ public void testSnapshotArrayTrigger() { validateUpdates(actual); assertTableEquals(expected, actual); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(base, i(20, 40), col("A", 30, 50), col("B", "aa", "bc")); base.notifyListeners(i(20, 40), i(), i()); }); assertTableEquals(expected.where("A in 1, 2, 3"), actual); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(left1, i(3), col("T", 5)); left1.notifyListeners(i(3), i(), i()); }); @@ -1274,13 +1289,14 @@ public void testSnapshotArrayValues() { validateUpdates(actual); assertTableEquals(ex1, actual); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(20, 40), col("A", 30, 50), col("B", "aa", "bc")); right.notifyListeners(i(20, 40), i(), i()); }); assertTableEquals(ex1, actual); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger1, i(3), col("T", 5)); trigger1.notifyListeners(i(3), i(), i()); }); @@ -1288,13 +1304,13 @@ public void testSnapshotArrayValues() { 
col("B", new ObjectVector[] {new ObjectVectorDirect<>("c", "aa", "a", "b", "bc")}), intCol("T", 5)); assertTableEquals(ex2, actual); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(right, i(20), intCol("A", 31), stringCol("B", "aaa")); right.notifyListeners(i(), i(), i(20)); }); assertTableEquals(ex2, actual); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger1, i(4), col("T", 6)); trigger1.notifyListeners(i(4), i(), i()); }); @@ -1320,7 +1336,8 @@ public void testSnapshotHistorical() { col("A", 3, 1, 2, 3, 1, 2), col("B", "c", "a", "b", "c", "a", "b"))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(base, i(20, 40), col("A", 30, 50), col("B", "aa", "bc")); base.notifyListeners(i(20, 40), i(), i()); }); @@ -1330,7 +1347,7 @@ public void testSnapshotHistorical() { col("A", 3, 1, 2, 3, 1, 2), col("B", "c", "a", "b", "c", "a", "b"))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger2, i(3), col("T", 5)); trigger2.notifyListeners(i(3), i(), i()); }); @@ -1340,7 +1357,7 @@ public void testSnapshotHistorical() { col("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50), col("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc"))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(base, i(10, 20, 30)); addToTable(base, i(25), col("A", 11), col("B", "A")); base.notifyListeners(i(), i(10, 20, 30), i(25)); @@ -1351,7 +1368,7 @@ public void testSnapshotHistorical() { col("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50), col("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc"))); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger2, i(4, 5), col("T", 7, 8)); trigger2.notifyListeners(i(4, 5), i(), i()); }); @@ -1361,7 +1378,7 @@ public void testSnapshotHistorical() { col("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), col("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc"))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet rowsToRemove = base.getRowSet().copy(); removeRows(base, rowsToRemove); base.notifyListeners(i(), rowsToRemove, i()); @@ -1372,7 +1389,7 @@ public void testSnapshotHistorical() { col("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), col("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc"))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger2, i(6), col("T", 9)); trigger2.notifyListeners(i(6), i(), i()); }); @@ -1395,49 +1412,70 @@ public void testSnapshotDependencies() { final Table snappedOfSnap = snappedDep.snapshotWhen(trigger, Flag.INITIAL); validateUpdates(snappedOfSnap); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); - }); - - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + 
TestCase.assertTrue( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + }); + + // This will do the notification for left; at which point we can do the first snapshot + // This should flush the TUV and the select + // Now we should flush the second snapshot + // This should flush the second TUV + // And now we should be done + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger, i(2), col("T", 2)); trigger.notifyListeners(i(2), i(), i()); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // This will do the notification for left; at which point we can do the first snapshot - boolean flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + boolean flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // 
This should flush the TUV and the select - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // Now we should flush the second snapshot - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // This should flush the second TUV - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); 
TestCase.assertTrue(flushed); // And now we should be done - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertFalse(flushed); }); TableTools.show(snappedOfSnap); @@ -1453,7 +1491,8 @@ public void testSnapshotAdditions() { final Table snapshot = base.snapshotWhen(trigger, Flag.INITIAL); validateUpdates(snapshot); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(base, i(20), col("A", 2)); trigger.notifyListeners(i(), i(), i(0)); }); @@ -1469,7 +1508,8 @@ public void testSnapshotRemovals() { final Table snapshot = base.snapshotWhen(trigger, Flag.INITIAL); validateUpdates(snapshot); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(base, i(20)); trigger.notifyListeners(i(), i(), i(0)); }); @@ -1485,7 +1525,8 @@ public void testSnapshotModifies() { final Table snapshot = base.snapshotWhen(trigger, Flag.INITIAL); validateUpdates(snapshot); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { base.notifyListeners(i(), i(), i(20)); trigger.notifyListeners(i(), i(), i(0)); }); @@ -1504,122 +1545,162 @@ public void testSnapshotIncrementalDependencies() { final Table snappedDep = snappedFirst.select("B=testSnapshotDependenciesCounter.incrementAndGet()"); final Table snappedOfSnap = snappedDep.snapshotWhen(trigger, Flag.INCREMENTAL); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + 
updateGraph.runWithinUnitTestCycle(() -> { System.out.println("Checking everything is satisfied with no updates."); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Simple Update Cycle Complete."); }); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { System.out.println("Adding Table."); addToTable(trigger, i(2), col("T", 2)); trigger.notifyListeners(i(2), i(), i()); System.out.println("Checking initial satisfaction."); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing first notification."); // this will do the notification for left - boolean flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + boolean flushed2 = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #1."); - 
TestCase.assertTrue(flushed); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(flushed2); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing second notification, which should be our listener recorder"); - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed2 = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #2."); - TestCase.assertTrue(flushed); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(flushed2); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing third notification, which should be our merged listener"); System.out.println("Checking satisfaction after #3."); - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed2 = updateGraph.flushOneNotificationForUnitTests(); // this will do the merged notification; which means the snaphsot is satisfied - 
TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(flushed2); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // now we should flush the select - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); - TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + flushed2 = updateGraph.flushOneNotificationForUnitTests(); + TestCase.assertTrue(flushed2); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // now we should flush the second snapshot recorder - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); - TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + flushed2 = updateGraph.flushOneNotificationForUnitTests(); + 
TestCase.assertTrue(flushed2); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // now we should flush the second snapshot merged listener - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); - TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + flushed2 = updateGraph.flushOneNotificationForUnitTests(); + TestCase.assertTrue(flushed2); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // nothing left - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); - TestCase.assertFalse(flushed); + flushed2 = updateGraph.flushOneNotificationForUnitTests(); + TestCase.assertFalse(flushed2); }); TableTools.show(snappedOfSnap); TestCase.assertEquals(snappedOfSnap.size(), 1); TestCase.assertEquals(DataAccessHelpers.getColumn(snappedOfSnap, "B").get(0), 1); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // this will do the notification for right; at which point we can should get the update going through + // nothing left + updateGraph.runWithinUnitTestCycle(() -> { System.out.println("Adding Right Table."); addToTable(base, i(2), col("A", 3)); base.notifyListeners(i(2), i(), i()); 
System.out.println("Checking initial satisfaction."); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing first notification."); // this will do the notification for right; at which point we can should get the update going through - boolean flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + boolean flushed1 = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #1."); - TestCase.assertTrue(flushed); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(flushed1); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing second notification, which should be our merged listener"); - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed1 = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #2."); - 
TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(flushed1); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // nothing left - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); - TestCase.assertFalse(flushed); + flushed1 = updateGraph.flushOneNotificationForUnitTests(); + TestCase.assertFalse(flushed1); }); TableTools.show(snappedOfSnap); TestCase.assertEquals(snappedOfSnap.size(), 1); TestCase.assertEquals(DataAccessHelpers.getColumn(snappedOfSnap, "B").get(0), 1); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // now we should flush the select + // now we should flush the second snapshot recorder + // now we should flush the second snapshot merged listener + // nothing left + updateGraph.runWithinUnitTestCycle(() -> { System.out.println("Adding Right Table."); addToTable(base, i(2), col("A", 3)); base.notifyListeners(i(2), i(), i()); @@ -1629,68 +1710,92 @@ public void testSnapshotIncrementalDependencies() { trigger.notifyListeners(i(3), i(), i()); System.out.println("Checking initial satisfaction."); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + 
TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing first notification."); - boolean flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + boolean flushed = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #1."); TestCase.assertTrue(flushed); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing second notification, which should be the recorder for our second snapshot"); - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #2."); TestCase.assertTrue(flushed); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + 
snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing third notification, which should be our right recorder"); - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #3."); TestCase.assertTrue(flushed); - TestCase.assertFalse(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); System.out.println("Flushing fourth notification, which should be our MergedListener"); - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #4."); TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // now we should flush the select - flushed = 
UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // now we should flush the second snapshot recorder - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertFalse( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // now we should flush the second snapshot merged listener - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(snappedFirst.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(snappedDep.satisfied(LogicalClock.DEFAULT.currentStep())); - 
TestCase.assertTrue(snappedOfSnap.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue( + snappedFirst.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedDep.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); + TestCase.assertTrue( + snappedOfSnap.satisfied(ExecutionContext.getContext().getUpdateGraph().clock().currentStep())); // nothing left - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertFalse(flushed); }); TableTools.show(snappedOfSnap); @@ -1716,14 +1821,14 @@ public void testWhereInScope() { final SingletonLivenessManager singletonManager = new SingletonLivenessManager(whereIn); // This will release setTable once and whereIn once, Rcs are now (1,2) - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(setScope::close); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(setScope::close); assertEquals(0, whereIn.size()); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); addToTable(setTable, i(0), col("Key", "B")); setTable.notifyListeners(i(0), i(), i()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); assertEquals(1, whereIn.size()); assertEquals(new Object[] {"B", 2}, DataAccessHelpers.getRecord(whereIn, 0)); @@ -1734,17 +1839,17 @@ public void testWhereInScope() { assertTrue(setTable.tryRetainReference()); setTable.dropReference(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); addToTable(setTable, i(1), col("Key", "D")); setTable.notifyListeners(i(1), i(), i()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); assertEquals(2, whereIn.size()); assertEquals(new Object[] {"B", 2}, DataAccessHelpers.getRecord(whereIn, 0)); assertEquals(new Object[] {"D", 4}, DataAccessHelpers.getRecord(whereIn, 1)); // Everything is dropped after this, the singletonManager was holding everything. - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(singletonManager::release); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(singletonManager::release); assertFalse(whereIn.tryRetainReference()); assertFalse(setTable.tryRetainReference()); @@ -1770,7 +1875,8 @@ public void testSnapshotIncremental() { snapshot.addUpdateListener(listener = newListenerWithGlobals(snapshot)); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(base, i(20, 40), col("A", 30, 50), col("B", "aa", "bc")); base.notifyListeners(i(20, 40), i(), i()); }); @@ -1778,7 +1884,7 @@ public void testSnapshotIncremental() { assertTableEquals(snapshot, testRefreshingTable(intCol("A"), stringCol("B"), intCol("T"))); assertEquals(listener.getCount(), 0); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger2, i(3), col("T", 5)); trigger2.notifyListeners(i(3), i(), i()); }); @@ -1793,7 +1899,7 @@ public void testSnapshotIncremental() { assertEquals(i(), removed); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(base, i(10, 20, 30)); addToTable(base, i(25, 75), col("A", 11, 34), col("B", "A", "Q")); base.notifyListeners(i(75), i(10, 20, 30), i(25)); @@ -1805,7 +1911,7 @@ public void testSnapshotIncremental() { col("T", 5, 5, 5, 5, 5))); assertEquals(listener.getCount(), 0); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger2, i(4, 5), col("T", 7, 8)); trigger2.notifyListeners(i(4, 5), i(), i()); }); @@ -1834,7 +1940,8 @@ public void testSnapshotIncrementalBigInitial() { assertTableEquals(expected, result); final Table result2 = base.snapshotWhen(trigger, Flag.INCREMENTAL); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger, i(1), col("T", 2)); trigger.notifyListeners(i(1), i(), i()); }); @@ -1863,7 +1970,8 @@ public void testSnapshotIncrementalPrev() { snapshot.addUpdateListener(listener = new io.deephaven.engine.table.impl.SimpleListener(snapshot)); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { assertTableEquals(prevTable(snapshot), firstResult); assertTableEquals(snapshot, firstResult); @@ -1876,7 +1984,7 @@ public void testSnapshotIncrementalPrev() { assertEquals(listener.getCount(), 0); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger, i(3), col("T", 5)); trigger.notifyListeners(i(3), i(), i()); assertEquals("", diff(prevTable(snapshot), firstResult, 10)); @@ -1894,7 +2002,7 @@ public void testSnapshotIncrementalPrev() { assertTrue(listener.update.shifted().empty()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(base, i(10, 20, 30)); addToTable(base, i(25, 75), col("A", 11, 34), col("B", "A", "Q")); base.notifyListeners(i(75), i(10, 20, 30), i(25)); @@ -1903,7 +2011,7 @@ public void testSnapshotIncrementalPrev() { assertTableEquals(snapshot, 
secondResult); assertEquals(listener.getCount(), 0); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger, i(4, 5), col("T", 7, 8)); trigger.notifyListeners(i(4, 5), i(), i()); }); @@ -1918,12 +2026,12 @@ public void testSnapshotIncrementalPrev() { assertTrue(listener.update.shifted().empty()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(base, i(25), col("A", 12), col("B", "R")); base.notifyListeners(i(), i(), i(25)); }); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(trigger, i(6), col("T", 9)); trigger.notifyListeners(i(6), i(), i()); }); @@ -1976,7 +2084,8 @@ public void testSnapshotIncrementalRandom() { final boolean modRight = random.nextBoolean(); final boolean modifyRightFirst = modRight && modStamp && random.nextBoolean(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { if (printTableUpdates) { System.out.println("Step = " + fstep + ", modStamp=" + modStamp + ", modRight=" + modRight + ", modifyRightFirst=" + modifyRightFirst); @@ -1986,10 +2095,10 @@ public void testSnapshotIncrementalRandom() { GenerateTableUpdates.generateTableUpdates(filteredSize, random, base, rightInfo); } if (modStamp) { - final long lastStamp = stampTable.getRowSet().lastRowKey(); + final long lastStamp1 = stampTable.getRowSet().lastRowKey(); final int numAdditions = 1 + random.nextInt(stampSize); final RowSet stampsToAdd = - RowSetFactory.fromRange(lastStamp + 1, lastStamp + numAdditions); + RowSetFactory.fromRange(lastStamp1 + 1, lastStamp1 + numAdditions); final ColumnHolder[] columnAdditions = new ColumnHolder[stampInfo.length]; for (int ii = 0; ii < columnAdditions.length; ii++) { @@ 
-2090,13 +2199,14 @@ static void testLegacyFlattenModifications(UnaryOperator function) { final Supplier newUpdate = () -> new TableUpdateImpl(i(), i(), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), col("intCol", 30)); removeRows(queryTable, i(2)); - final TableUpdateImpl update = newUpdate.get(); - update.added = i(3); - update.removed = i(2); - queryTable.notifyListeners(update); + final TableUpdateImpl update3 = newUpdate.get(); + update3.added = i(3); + update3.removed = i(2); + queryTable.notifyListeners(update3); }); Assert.assertEquals("simpleListener.getCount() == 1", 1, simpleListener.getCount()); @@ -2107,12 +2217,12 @@ static void testLegacyFlattenModifications(UnaryOperator function) { Assert.assertEquals("simpleListener.update.added = {1}", i(1), simpleListener.update.added()); Assert.assertEquals("simpleListener.update.removed = {1}", i(1), simpleListener.update.removed()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), col("intCol", 30)); - final TableUpdateImpl update = newUpdate.get(); - update.modified = i(3); - update.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); - queryTable.notifyListeners(update); + final TableUpdateImpl update2 = newUpdate.get(); + update2.modified = i(3); + update2.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); + queryTable.notifyListeners(update2); }); Assert.assertEquals("simpleListener.getCount() == 2", 2, simpleListener.getCount()); @@ -2122,13 +2232,13 @@ static void testLegacyFlattenModifications(UnaryOperator function) { Assert.assertEquals("simpleListener.update.modified = {1}", i(1), simpleListener.update.modified()); 
Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted().size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3, 5), col("intCol", 30, 50)); - final TableUpdateImpl update = newUpdate.get(); - update.added = i(5); - update.modified = i(3); - update.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); - queryTable.notifyListeners(update); + final TableUpdateImpl update1 = newUpdate.get(); + update1.added = i(5); + update1.modified = i(3); + update1.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); + queryTable.notifyListeners(update1); }); Assert.assertEquals("simpleListener.getCount() == 3", 3, simpleListener.getCount()); @@ -2145,7 +2255,7 @@ static void testLegacyFlattenModifications(UnaryOperator function) { Assert.assertEquals("simpleListener.update.shifted.getShiftDelta(0) = 1", 1, simpleListener.update.shifted().getShiftDelta(0)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(queryTable, i(4)); final TableUpdateImpl update = newUpdate.get(); update.removed = i(4); @@ -2180,13 +2290,14 @@ static void testShiftingModifications(UnaryOperator function) { final Supplier newUpdate = () -> new TableUpdateImpl(i(), i(), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), col("intCol", 30)); removeRows(queryTable, i(2)); - final TableUpdateImpl update = newUpdate.get(); - update.added = i(3); - update.removed = i(2); - queryTable.notifyListeners(update); + final TableUpdateImpl update3 = newUpdate.get(); + update3.added = i(3); + update3.removed = i(2); + queryTable.notifyListeners(update3); }); 
Assert.assertEquals("simpleListener.getCount() == 1", 1, simpleListener.getCount()); @@ -2197,12 +2308,12 @@ static void testShiftingModifications(UnaryOperator function) { Assert.assertEquals("simpleListener.update.added = {3}", i(3), simpleListener.update.added()); Assert.assertEquals("simpleListener.update.removed = {2}", i(2), simpleListener.update.removed()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), col("intCol", 30)); - final TableUpdateImpl update = newUpdate.get(); - update.modified = i(3); - update.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); - queryTable.notifyListeners(update); + final TableUpdateImpl update2 = newUpdate.get(); + update2.modified = i(3); + update2.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); + queryTable.notifyListeners(update2); }); Assert.assertEquals("simpleListener.getCount() == 2", 2, simpleListener.getCount()); @@ -2212,13 +2323,13 @@ static void testShiftingModifications(UnaryOperator function) { Assert.assertEquals("simpleListener.update.modified = {3}", i(3), simpleListener.update.modified()); Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted().size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3, 5), col("intCol", 30, 50)); - final TableUpdateImpl update = newUpdate.get(); - update.added = i(5); - update.modified = i(3); - update.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); - queryTable.notifyListeners(update); + final TableUpdateImpl update1 = newUpdate.get(); + update1.added = i(5); + update1.modified = i(3); + update1.modifiedColumnSet = queryTable.newModifiedColumnSet("intCol"); + queryTable.notifyListeners(update1); }); Assert.assertEquals("simpleListener.getCount() == 3", 3, simpleListener.getCount()); @@ -2235,7 +2346,7 @@ static void 
testShiftingModifications(UnaryOperator function) { // Assert.assertEquals("simpleListener.update.shifted.getShiftDelta(0) = 1", 1, // simpleListener.update.shifted.getShiftDelta(0)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(queryTable, i(4)); final TableUpdateImpl update = newUpdate.get(); update.removed = i(4); @@ -2420,7 +2531,8 @@ public void testUngroupableColumnSources() { fail("Expected does not match previous value!"); } - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { }); intPrevDirect = @@ -2447,7 +2559,8 @@ public void testUngroupOverflow() { t1.addUpdateListener(errorListener); // This is too big, we should fail - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final long bigIndex = 1L << 55; addToTable(table, i(bigIndex), intCol("X", 3), new ColumnHolder<>("Y", String[].class, String.class, false, new String[] {"f"})); @@ -2483,7 +2596,8 @@ public void testUngroupWithRebase() { validateUpdates(t1); // This is too big, we should fail - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(9), col("X", 3), new ColumnHolder<>("Y", String[].class, String.class, false, new String[] {"f", "g", "h", "i", "j", "k"})); table.notifyListeners(i(9), i(), i()); @@ -2501,7 +2615,7 @@ public void testUngroupWithRebase() { assertEquals(Arrays.asList("a", "b", "c", "d", "e"), Arrays.asList((String[]) IndexedDataColumn .makePreviousColumn(t1.getRowSet(), t1.getColumnSource("Y")).getDirect())); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { }); assertEquals(Arrays.asList("X", "Y"), t1.getDefinition().getColumnNames()); @@ -2973,7 +3087,18 @@ public void testIds7153() { final MutableObject nj = new MutableObject<>(); final MutableObject ft = new MutableObject<>(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // now is safe to create the nj + // The real test happens here. Off of the UGP thread we do an operation, one that supports concurrent + // instantiation, such that we use prev values when applicable. Assume the parent table has not ticked + // this cycle: 1) if the parent table pre-existed then we want to use prev values (to handle when parent + // is mid-tick but unpublished) 2) if the parent table was created this cycle, then A) prev values are + // undefined, B) it must have been created AFTER any of its dependencies may have ticked this cycle and + // C) the table is not allowed to tick this cycle. + // The specific scenario we are trying to catch is when the parent re-uses data structures (i.e. RowSet) + // from its parent, which have valid prev values, but the prev values must not be used during the first + // cycle. + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newRows = i(2, 4, 18, 20); addToTable(lTable, newRows, col("X", "e", "f", "g", "h")); final TableUpdateImpl update = new TableUpdateImpl(); @@ -3022,7 +3147,7 @@ public void testNoCoalesceOnNotification() { // to coalesce a table and immediately make it garbage. // This regression check verifies that we do not see a lastNotificationStep != - // LogicalClock.DEFAULT.currentStep() + // ExecutionContext.getContext().getUpdateGraph().logicalClock().currentStep() // assertion error when notifying from an uncoalesced table. 
final TrackingWritableRowSet parentRowSet = RowSetFactory.empty().toTracking(); @@ -3031,7 +3156,8 @@ public void testNoCoalesceOnNotification() { final UncoalescedTable table = new MockUncoalescedTable(supplier); table.setRefreshing(true); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final TableUpdateImpl update = new TableUpdateImpl(); update.added = RowSetFactory.fromKeys(parentRowSet.size()); @@ -3042,7 +3168,8 @@ public void testNoCoalesceOnNotification() { parentRowSet.insert(update.added()); table.notifyListeners(update); - Assert.assertEquals(LogicalClock.DEFAULT.currentStep(), table.getLastNotificationStep()); + Assert.assertEquals(ExecutionContext.getContext().getUpdateGraph().clock().currentStep(), + table.getLastNotificationStep()); }); } @@ -3059,7 +3186,8 @@ public void testNotifyListenersReleasesUpdateEmptyUpdate() { final TableUpdateListener listener = new io.deephaven.engine.table.impl.SimpleListener(src); src.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(src, update.added()); src.notifyListeners(update); }); @@ -3075,7 +3203,8 @@ public void testNotifyListenersReleasesUpdateNoListeners() { update.shifted = RowSetShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(src, update.added()); src.notifyListeners(update); }); @@ -3095,7 +3224,8 @@ public void testNotifyListenersReleasesUpdateChildListener() { final ShiftObliviousListener listener = new 
SimpleShiftObliviousListener(src); src.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(src, update.added()); src.notifyListeners(update); }); @@ -3116,7 +3246,8 @@ public void testNotifyListenersReleasesUpdateShiftAwareChildListener() { new io.deephaven.engine.table.impl.SimpleListener(src); src.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(src, update.added()); src.notifyListeners(update); }); @@ -3145,7 +3276,8 @@ public void testRegressionIssue544() { int i = 1; for (int step = 0; step < 2; ++step) { final int key = i++; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet addRowSet = i(key); addToTable(t1, addRowSet, intCol("T", key)); t1.notifyListeners(addRowSet, i(), i()); @@ -3247,4 +3379,142 @@ protected MockUncoalescedTable copy() { return new MockUncoalescedTable(supplier); } } + + public void testMultipleUpdateGraphs() { + final QueryTable r1, s1, r2, s2; + final UpdateGraph g1 = new DummyUpdateGraph("one"); + final UpdateGraph g2 = new DummyUpdateGraph("two"); + + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(g1).open()) { + r1 = testRefreshingTable(i().toTracking(), intCol("T")); + s1 = testTable(i().toTracking(), intCol("T")); + } + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(g2).open()) { + r2 = testRefreshingTable(i().toTracking(), intCol("T")); + s2 = testTable(i().toTracking(), intCol("T")); + } + + try { + 
g1.sharedLock().computeLocked(() -> g2.sharedLock().computeLocked(() -> merge(r1, r2))); + fail("Expected conflict"); + } catch (UpdateGraphConflictException expected) { + } + + try { + g1.sharedLock().computeLocked(() -> g2.sharedLock().computeLocked(() -> merge(s1, r1, s2, r2))); + fail("Expected conflict"); + } catch (UpdateGraphConflictException expected) { + } + + assertEquals(g1, g1.sharedLock().computeLocked(() -> merge(r1, s2).getUpdateGraph())); + assertEquals(g1, g1.sharedLock().computeLocked(() -> merge(s2, r1).getUpdateGraph())); + assertEquals(g1, g1.sharedLock().computeLocked(() -> merge(r1, s1, s2).getUpdateGraph())); + assertEquals(g1, g1.sharedLock().computeLocked(() -> merge(s2, s1, r1).getUpdateGraph())); + + assertEquals(g2, g2.sharedLock().computeLocked(() -> merge(r2, s1).getUpdateGraph())); + assertEquals(g2, g2.sharedLock().computeLocked(() -> merge(s1, r2).getUpdateGraph())); + assertEquals(g2, g2.sharedLock().computeLocked(() -> merge(r2, s2, s1).getUpdateGraph())); + assertEquals(g2, g2.sharedLock().computeLocked(() -> merge(s1, s2, r2).getUpdateGraph())); + } + + private static final class DummyUpdateGraph implements UpdateGraph { + + private final String name; + private final UpdateGraphLock lock; + + private final ThreadLocal serialTableOperationsSafe = ThreadLocal.withInitial(() -> false); + + private DummyUpdateGraph(@NotNull final String name) { + this.name = name; + lock = UpdateGraphLock.create(this, true); + } + + @Override + public LogOutput append(LogOutput logOutput) { + return logOutput.append(getClass().getName()); + } + + @Override + public boolean satisfied(long step) { + throw new UnsupportedOperationException(); + } + + @Override + public UpdateGraph getUpdateGraph() { + return this; + } + + @Override + public void addNotification(@NotNull Notification notification) { + throw new UnsupportedOperationException(); + } + + @Override + public void addNotifications(@NotNull Collection notifications) { + throw new 
UnsupportedOperationException(); + } + + @Override + public boolean maybeAddNotification(@NotNull Notification notification, long deliveryStep) { + throw new UnsupportedOperationException(); + } + + @Override + public AwareFunctionalLock sharedLock() { + return lock.sharedLock(); + } + + @Override + public AwareFunctionalLock exclusiveLock() { + return lock.exclusiveLock(); + } + + @Override + public LogicalClock clock() { + return () -> 1; + } + + @Override + public int parallelismFactor() { + return 1; + } + + @Override + public LogEntry logDependencies() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean currentThreadProcessesUpdates() { + return false; + } + + @Override + public boolean serialTableOperationsSafe() { + return serialTableOperationsSafe.get(); + } + + @Override + public boolean setSerialTableOperationsSafe(final boolean newValue) { + final boolean oldValue = serialTableOperationsSafe.get(); + serialTableOperationsSafe.set(newValue); + return oldValue; + } + + @Override + public boolean supportsRefreshing() { + return true; + } + + @Override + public void addSource(@NotNull Runnable updateSource) {} + + @Override + public void removeSource(@NotNull Runnable updateSource) {} + + @Override + public void requestRefresh() { + throw new UnsupportedOperationException(); + } + } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java index a21c2f66c96..961e34ce101 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableTreeTest.java @@ -66,8 +66,8 @@ public void testNothing() {} // col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2)); // // final Table treed = - // UpdateGraphProcessor.DEFAULT.exclusiveLock() - // .computeLocked(() -> source.tree("Sentinel", "Parent")); + // 
ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> source.tree("Sentinel", + // "Parent")); // final String hierarchicalColumnName = getHierarchicalColumnName(treed); // TableTools.showWithRowSet(treed); // @@ -122,15 +122,15 @@ public void testNothing() {} // final Table rootExpected3 = source3.where("isNull(Parent)"); // // final Supplier

doTree = () -> source.tree("Sentinel", "Parent"); - // final Table expect = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(doTree::get); - // final Table expectOriginal = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table expect = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(doTree::get); + // final Table expectOriginal = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> makeStatic(source).tree("Sentinel", "Parent")); - // final Table expect2 = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table expect2 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> source2.tree("Sentinel", "Parent")); // // final String hierarchicalColumnName = getHierarchicalColumnName(expect); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // // final Table treed1 = pool.submit(doTree::get).get(); // @@ -190,7 +190,7 @@ public void testNothing() {} // doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3, 0, 4, hierarchicalColumnName, // CollectionUtil.ZERO_LENGTH_STRING_ARRAY); // - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // // doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed1, 0, 4, hierarchicalColumnName, // CollectionUtil.ZERO_LENGTH_STRING_ARRAY); @@ -210,7 +210,7 @@ public void testNothing() {} // final Table eleven1a = map1.get(11); // assertNull(eleven1a); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // // final Table backwards1 = // pool.submit(() -> TreeTableFilter.rawFilterTree(treed1, "!isNull(Extra)").sortDescending("Extra")) @@ -241,7 +241,7 @@ public void testNothing() {} // 
source.notifyListeners(i(12), i(), i()); // // final Table treed6 = pool.submit(doTree::get).get(); - // UpdateGraphProcessor.DEFAULT.flushAllNormalNotificationsForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().flushAllNormalNotificationsForUnitTests(); // // final Table backwardsTree1c = pool.submit(() -> backwards1.tree("Sentinel", "Parent")).get(); // final Table backwardsTree2b = pool.submit(() -> backwards2.tree("Sentinel", "Parent")).get(); @@ -257,7 +257,7 @@ public void testNothing() {} // assertTableEquals(root2a, rootExpected3); // assertTableEquals(root3a, rootExpected3); // - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // // final Table eleven1c = map1.get(11); // assertNotNull(eleven1c); @@ -278,7 +278,7 @@ public void testNothing() {} // hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); // } // - // final Table backwardsExpected = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table backwardsExpected = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> source.sortDescending("Extra").tree("Sentinel", "Parent")); // ii = 1; // for (Table treed : Arrays.asList(backwardsTree1a, backwardsTree1b, backwardsTree1c, backwardsTree2a, @@ -313,10 +313,10 @@ public void testNothing() {} // final Function doSortAndTree = doSort.andThen(doTree); // // final Table expect = - // UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> doSortAndTree.apply(source)); - // final Table expectOriginal = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> doSortAndTree.apply(source)); + // final Table expectOriginal = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> doSortAndTree.apply(makeStatic(source))); - // final Table expect2 = 
UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table expect2 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> doSortAndTree.apply(makeStatic(source2))); // // final String hierarchicalColumnName = getHierarchicalColumnName(expect); @@ -325,7 +325,7 @@ public void testNothing() {} // final Table sorted0Original = doSort.apply(makeStatic(source)); // final Table sorted2 = doSort.apply(makeStatic(source2)); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // // final Table treed1 = pool.submit(() -> doSortAndTree.apply(source)).get(); // final Table sorted1 = pool.submit(() -> doSort.apply(source)).get(); @@ -352,7 +352,7 @@ public void testNothing() {} // // source.notifyListeners(i(11, 12), i(0), i(1)); // - // UpdateGraphProcessor.DEFAULT.flushAllNormalNotificationsForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().flushAllNormalNotificationsForUnitTests(); // // // everything should have current values now // doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect2, false, false, 0, 4, @@ -399,7 +399,7 @@ public void testNothing() {} // assertTableEquals(sorted2, sorted0); // assertTableEquals(sorted2, sorted1); // - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // // doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed1, 0, 4, hierarchicalColumnName, // CollectionUtil.ZERO_LENGTH_STRING_ARRAY); @@ -430,7 +430,7 @@ public void testNothing() {} // col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 6)); // // final Table treed = - // UpdateGraphProcessor.DEFAULT.exclusiveLock() + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> source.tree("Sentinel", "Parent")); // TableTools.showWithRowSet(treed); // @@ 
-455,7 +455,7 @@ public void testNothing() {} // col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 6)); // // final Table treed = - // UpdateGraphProcessor.DEFAULT.exclusiveLock() + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> source.tree("Sentinel", "Parent")); // TableTools.showWithRowSet(treed); // @@ -481,61 +481,61 @@ public void testNothing() {} // // assertNull(getChildTable(filtered, child2, hierarchicalColumnName, 0)); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(10), col("Sentinel", 11), col("Parent", 2)); // source.notifyListeners(i(10), i(), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // System.out.println("Modified."); // TableTools.showWithRowSet(filtered); // assertEquals(2, filtered.size()); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(10), col("Sentinel", 12), col("Parent", 2)); // source.notifyListeners(i(), i(), i(10)); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // System.out.println("Modified."); // TableTools.showWithRowSet(filtered); // assertEquals(1, filtered.size()); // // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(10, 11), col("Sentinel", 12, 11), col("Parent", 2, 12)); // source.notifyListeners(i(11), i(), i(10)); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // System.out.println("Grand 
parent."); // TableTools.showWithRowSet(filtered); // assertEquals(2, filtered.size()); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(11), col("Sentinel", 13), col("Parent", 12)); // source.notifyListeners(i(), i(), i(11)); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // System.out.println("Grand parent disappear."); // TableTools.showWithRowSet(filtered); // assertEquals(1, filtered.size()); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(12), col("Sentinel", 14), col("Parent", 13)); // source.notifyListeners(i(12), i(), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // TableTools.showWithRowSet(source, 15); // System.out.println("Great grand parent appear."); // TableTools.showWithRowSet(filtered); // assertEquals(2, filtered.size()); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // removeRows(source, i(1)); // source.notifyListeners(i(), i(1), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // TableTools.showWithRowSet(source, 15); // System.out.println("2 removed."); // TableTools.showWithRowSet(filtered); // assertEquals(1, filtered.size()); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(1), col("Sentinel", 2), col("Parent", NULL_INT)); // source.notifyListeners(i(1), i(), 
i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // TableTools.showWithRowSet(source, 15); // System.out.println("2 resurrected."); // TableTools.showWithRowSet(filtered); @@ -546,32 +546,32 @@ public void testNothing() {} // final QueryTable source = testRefreshingTable(RowSetFactory.flat(4).toTracking(), // col("Sentinel", 1, 2, 3, 4), col("Parent", NULL_INT, NULL_INT, 1, 5)); // - // final Table treed = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> TreeTable + // final Table treed = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> TreeTable // .promoteOrphans(source, "Sentinel", "Parent").tree("Sentinel", "Parent")); // TableTools.showWithRowSet(treed); // assertEquals(3, treed.size()); // // // add a parent, which will make something not an orphan - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(5), col("Sentinel", 5), col("Parent", 1)); // source.notifyListeners(i(5), i(), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // TableTools.showWithRowSet(treed); // assertEquals(2, treed.size()); // // // swap two things - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(source, i(0, 1), col("Sentinel", 2, 1), col("Parent", NULL_INT, NULL_INT)); // source.notifyListeners(i(), i(), i(0, 1)); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // TableTools.showWithRowSet(treed); // assertEquals(2, treed.size()); // // // now remove a parent - // 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // removeRows(source, i(0, 1)); // source.notifyListeners(i(), i(0, 1), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // TableTools.showWithRowSet(treed); // assertEquals(2, treed.size()); // } @@ -602,7 +602,7 @@ public void testNothing() {} // new EvalNugget() { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> { + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> { // final Table treed = source.tree("Sentinel", "Parent"); // return TreeTableFilter.rawFilterTree(treed, "Filter in 1"); // }); @@ -614,35 +614,35 @@ public void testNothing() {} // Assert.assertEquals(0, en[0].originalValue.size()); // // // modify child to have parent - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(source, i(0), col("Sentinel", 0), col("Filter", 1), col("Parent", 1)); // source.notifyListeners(i(), i(), i(0)); // }); // Assert.assertEquals(i(0, 1), en[0].originalValue.getRowSet()); // // // modify parent to have grandparent - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(source, i(1), col("Sentinel", 1), col("Filter", 0), col("Parent", 2)); // source.notifyListeners(i(), i(), i(1)); // }); // Assert.assertEquals(i(0, 1, 2), en[0].originalValue.getRowSet()); // // // modify parent's id to orphan child - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(source, i(1), 
col("Sentinel", -1), col("Filter", 0), col("Parent", 2)); // source.notifyListeners(i(), i(), i(1)); // }); // Assert.assertEquals(i(0), en[0].originalValue.getRowSet()); // // // revert parent's id and adopt child - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(source, i(1), col("Sentinel", 1), col("Filter", 0), col("Parent", 2)); // source.notifyListeners(i(), i(), i(1)); // }); // Assert.assertEquals(i(0, 1, 2), en[0].originalValue.getRowSet()); // // // remove child, resurrect parent - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // removeRows(source, i(0)); // addToTable(source, i(3), col("Sentinel", 3), col("Filter", 1), col("Parent", 1)); // source.notifyListeners(i(), i(0), i(3)); @@ -987,22 +987,22 @@ public void testNothing() {} // new TreeTableEvalNugget(prepared) { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock() - // .computeLocked(() -> prepared.tree("ID", "Parent")); + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> prepared.tree("ID", + // "Parent")); // } // }, // new TreeTableEvalNugget(prepared) { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock() - // .computeLocked(() -> prepared.sort("Sym").tree("ID", "Parent")); + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> + // prepared.sort("Sym").tree("ID", "Parent")); // } // }, // new TreeTableEvalNugget(prepared) { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock() - // .computeLocked(() -> prepared.sort("Sentinel").tree("ID", "Parent")); + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> + // 
prepared.sort("Sentinel").tree("ID", "Parent")); // } // }, // new TreeTableEvalNugget(prepared) { @@ -1056,21 +1056,21 @@ public void testNothing() {} // new EvalNugget() { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( // () -> TreeTable.promoteOrphans((QueryTable) prepared, "ID", "Parent")); // } // }, // new EvalNugget() { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> TreeTable + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> TreeTable // .promoteOrphans((QueryTable) prepared.where("Sentinel % 2 == 0"), "ID", "Parent")); // } // }, // new TreeTableEvalNugget(prepared) { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock() + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> TreeTable.promoteOrphans((QueryTable) prepared // .where("Sentinel % 2 == 0"), "ID", "Parent").tree("ID", "Parent")); // } @@ -1175,16 +1175,19 @@ public void testNothing() {} // System.out.println("Source Data:"); // TableTools.showWithRowSet(table); // - // final Table rollup = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table rollup = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> table.rollup(comboAgg, "USym", "Instant", "BoolCol", "BigIntCol", "BigDecCol")); // verifyReverseLookup(rollup); // // verifyReverseLookup( - // UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> table.rollup(comboAgg, "USym"))); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> table.rollup(comboAgg, + // "USym"))); // verifyReverseLookup( - // UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> table.rollup(comboAgg, "Instant"))); + // 
ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> table.rollup(comboAgg, + // "Instant"))); // verifyReverseLookup( - // UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> table.rollup(comboAgg, "BoolCol"))); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> table.rollup(comboAgg, + // "BoolCol"))); // } // // private void verifyReverseLookup(Table rollup) { @@ -1291,10 +1294,11 @@ public void testNothing() {} // TableTools.showWithRowSet(table); // // final Table rollup = - // UpdateGraphProcessor.DEFAULT.exclusiveLock() + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> table.rollup(comboAgg, "USym", "Group")); // - // final Table fullBy = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> table.aggBy(comboAgg)); + // final Table fullBy = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> + // table.aggBy(comboAgg)); // System.out.println("Full By:"); // TableTools.showWithRowSet(fullBy); // @@ -1333,7 +1337,7 @@ public void testNothing() {} // // final SafeCloseable scopeCloseable = LivenessScopeStack.open(); // - // final Table rollup = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table rollup = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> table.rollup(List.of(AggSum("IntCol", "DoubleCol")), "USym", "Group")); // final TableMap rootMap = (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); // final Table nextLevel = rootMap.get(SmartKey.EMPTY); @@ -1344,7 +1348,7 @@ public void testNothing() {} // // final SingletonLivenessManager rollupManager = new SingletonLivenessManager(rollup); // - // UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(scopeCloseable::close); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(scopeCloseable::close); // // 
Assert.assertTrue(rollup.tryRetainReference()); // Assert.assertTrue(rootMap.tryRetainReference()); @@ -1354,7 +1358,7 @@ public void testNothing() {} // rootMap.dropReference(); // nextLevel.dropReference(); // - // UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(rollupManager::release); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(rollupManager::release); // // // we should not be able to retainReference the rollup, because closing the scope should have decremented it to // // zero @@ -1386,7 +1390,7 @@ public void testNothing() {} // // final SingletonLivenessManager treeManager = new SingletonLivenessManager(treed); // - // UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(scopeCloseable::close); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(scopeCloseable::close); // // Assert.assertTrue(treed.tryRetainReference()); // Assert.assertTrue(promoted.tryRetainReference()); @@ -1397,7 +1401,7 @@ public void testNothing() {} // // assertTableEquals(table, treed); // - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // final long key = table.getRowSet().firstRowKey(); // table.getRowSet().writableCast().remove(key); // removeRows(table, i(key)); @@ -1406,7 +1410,7 @@ public void testNothing() {} // // assertTableEquals(table, treed); // - // UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(treeManager::release); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(treeManager::release); // // // we should not be able to retainReference the tree table, because closing the scope should have decremented it // // to zero @@ -1422,13 +1426,13 @@ public void testNothing() {} // // final SafeCloseable scopeCloseable = LivenessScopeStack.open(); // - // final Table rollup = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table rollup = 
ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> table.rollup(List.of(AggSum("IntCol", "DoubleCol")), "USym", "Group")); // final TableMap rootMap = (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); // // final SingletonLivenessManager rollupManager = new SingletonLivenessManager(rollup); // - // UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(scopeCloseable::close); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(scopeCloseable::close); // // // dumpRollup(rollup, getHierarchicalColumnName(rollup), "USym", "Group"); // @@ -1438,11 +1442,11 @@ public void testNothing() {} // rollup.dropReference(); // rootMap.dropReference(); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // addToTable(table, i(0, 1), col("USym", "AAPL", "TSLA"), col("Group", "Terran", "Vulcan"), // intCol("IntCol", 1, 2), doubleCol("DoubleCol", .1, .2)); // table.notifyListeners(i(0, 1), i(), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // // final SafeCloseable getScope = LivenessScopeStack.open(); // final Table nextLevel = rootMap.get(SmartKey.EMPTY); @@ -1456,8 +1460,8 @@ public void testNothing() {} // rootMap.dropReference(); // nextLevel.dropReference(); // - // UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(getScope::close); - // UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(rollupManager::release); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(getScope::close); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(rollupManager::release); // // // we should not be able to retainReference the rollup, because closing the scope should have decremented it to // // zero @@ -1482,7 +1486,7 @@ public void 
testNothing() {} // new DoubleGenerator(-100, 100), // new SetGenerator<>("A", "B", "C", "D"))); // - // final Table rollup = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked( + // final Table rollup = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( // () -> table.rollup(List.of(AggSum("DoubleCol"), AggFirst("StringCol")), "USym", "Group", "IntCol")); // TestCase.assertEquals(String.class, rollup.getColumnSource("USym").getType()); // TestCase.assertEquals(String.class, rollup.getColumnSource("Group").getType()); @@ -1530,13 +1534,14 @@ public void testNothing() {} // col("G2", "C", "C", "D", "D", "E", "E"), // col("IntCol", 1, 2, 3, 4, 5, 6)); // - // final Table rollup = UpdateGraphProcessor.DEFAULT.exclusiveLock() + // final Table rollup = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> table.rollup(List.of(aggregation), "G1", "G2")); // // dumpRollup(rollup, "G1", "G2"); // // final Table fullBy = - // UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> table.aggBy(List.of(aggregation))); + // ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> + // table.aggBy(List.of(aggregation))); // // final Table rollupClean = getDiffableTable(rollup).view("IntCol"); // @@ -1544,10 +1549,10 @@ public void testNothing() {} // // assertEquals("", diff); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // removeRows(table, i(2)); // table.notifyListeners(i(), i(2), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // // System.out.println("Removed Row 2, Rollup:"); // dumpRollup(rollup, "G1", "G2"); @@ -1557,10 +1562,10 @@ public void testNothing() {} // final String diff2 = TableTools.diff(fullBy, rollupClean, 10, 
EnumSet.of(TableDiff.DiffItems.DoublesExact)); // assertEquals("", diff2); // - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // removeRows(table, i(0, 1)); // table.notifyListeners(i(), i(0, 1), i()); - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // // dumpRollup(rollup, "G1", "G2"); // @@ -1665,7 +1670,7 @@ public void testNothing() {} // new RollupEvalNugget(3, "USym", "Group") { // @Override // protected Table e() { - // return UpdateGraphProcessor.DEFAULT.exclusiveLock() + // return ExecutionContext.getContext().getUpdateGraph().exclusiveLock() // .computeLocked(() -> table.rollup(rollupDefinition, "USym", "Group")); // } // @@ -1732,7 +1737,7 @@ public void testNothing() {} // // for (int step = 0; step < 100; ++step) { // System.out.println("step = " + step); - // UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); // // final int numChanges = random.nextInt(100); // final RowSetBuilderSequential builder = RowSetFactory.builderSequential(); @@ -1811,7 +1816,7 @@ public void testNothing() {} // // // TableTools.showWithRowSet(source.getSubTable(newRowSet)); // - // UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + // ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // // final String hierarchicalColumnName = getHierarchicalColumnName(ordersFiltered); // doCompareWithChildrenForTrees("step = " + step, ordersFiltered, @@ -1834,7 +1839,7 @@ public void testNothing() {} // assertNull(rollup.getColumn("BigI").get(0)); // assertNull(rollup.getColumn("BigD").get(0)); // - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // 
addToTable(table, i(2, 3), col("Sym", "A", "A"), col("BigI", BigInteger.ZERO, BigInteger.ZERO), // col("BigD", BigDecimal.ZERO, BigDecimal.ZERO)); // table.notifyListeners(i(2, 3), i(), i()); @@ -1865,7 +1870,7 @@ public void testNothing() {} // assertNotNull(aTable); // // // Start with Nulls and make sure we get NaN - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(dataTable, i(1, 2), // stringCol("USym", "A", "A"), // doubleCol("Value", NULL_DOUBLE, NULL_DOUBLE), @@ -1893,7 +1898,7 @@ public void testNothing() {} // assertEquals(Double.NaN, aTable.getColumn("LValue").getDouble(0)); // // // Add a real value 0, which used to be broken because the default value was 0 and resulted in a no change - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(dataTable, i(3), // stringCol("USym", "A"), // doubleCol("Value", 0.0d), @@ -1921,7 +1926,7 @@ public void testNothing() {} // assertEquals(0.0d, aTable.getColumn("LValue").getDouble(0)); // // // Delete the real value to make sure we go back to NaN - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // removeRows(dataTable, i(3)); // // dataTable.notifyListeners(i(), i(3), i()); @@ -1942,7 +1947,7 @@ public void testNothing() {} // assertEquals(Double.NaN, aTable.getColumn("LValue").getDouble(0)); // // // Add a couple of real 0's and make sure we get a 0 - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(dataTable, i(3, 4, 5), // stringCol("USym", "A", "A", "A"), // doubleCol("Value", 0.0d, 0.0d, 0.0d), @@ -1969,7 +1974,7 @@ public void testNothing() {} // 
assertEquals(0.0d, aTable.getColumn("IValue").getDouble(0)); // assertEquals(0.0d, aTable.getColumn("LValue").getDouble(0)); // - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(dataTable, i(6), // stringCol("USym", "A"), // doubleCol("Value", 1.0d), @@ -2013,7 +2018,7 @@ public void testNothing() {} // final Table rollup = source.rollup(List.of(AggVar("Val")), "G1", "G2"); // checkVar(source, rollup); // - // UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(() -> { // addToTable(source, i(9, 11), col("G1", "B", "A"), col("G2", "a", "a"), intCol("Val", 6, 7)); // final TableUpdate update = // new TableUpdateImpl(i(), i(), i(9, 11), RowSetShiftData.EMPTY, source.newModifiedColumnSet("Val")); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWhereTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWhereTest.java index 415843b416b..c894a45d746 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWhereTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWhereTest.java @@ -5,40 +5,35 @@ import io.deephaven.api.RawString; import io.deephaven.api.filter.Filter; +import io.deephaven.chunk.Chunk; +import io.deephaven.chunk.LongChunk; +import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.context.ExecutionContext; -import io.deephaven.engine.exceptions.CancellationException; -import io.deephaven.engine.table.ShiftObliviousListener; -import io.deephaven.engine.table.impl.sources.RowIdSource; -import io.deephaven.engine.testutil.QueryTableTestBase.TableComparator; -import io.deephaven.engine.table.impl.chunkfilter.ChunkFilter; -import io.deephaven.engine.table.Table; -import 
io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.generator.*; -import io.deephaven.engine.testutil.GenerateTableUpdates; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.engine.table.impl.select.MatchPairFactory; import io.deephaven.engine.context.QueryScope; +import io.deephaven.engine.exceptions.CancellationException; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.verify.TableAssertions; import io.deephaven.engine.table.impl.select.*; import io.deephaven.engine.table.impl.chunkfilter.IntRangeComparator; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.table.impl.sources.UnionRedirection; -import io.deephaven.chunk.*; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys; +import io.deephaven.engine.table.ShiftObliviousListener; +import io.deephaven.engine.table.Table; +import io.deephaven.engine.table.impl.chunkfilter.ChunkFilter; +import io.deephaven.engine.table.impl.sources.RowIdSource; +import io.deephaven.engine.testutil.*; +import io.deephaven.engine.testutil.QueryTableTestBase.TableComparator; +import io.deephaven.engine.testutil.generator.*; import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.internal.log.LoggerFactory; +import io.deephaven.io.logger.Logger; +import io.deephaven.time.DateTimeUtils; import io.deephaven.util.QueryConstants; import io.deephaven.util.SafeCloseable; import io.deephaven.util.annotations.ReflexiveUse; - import junit.framework.TestCase; import 
org.apache.commons.lang3.mutable.MutableObject; import org.junit.Rule; @@ -54,15 +49,13 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.function.IntUnaryOperator; +import static io.deephaven.engine.testutil.TstUtils.*; import static io.deephaven.engine.testutil.testcase.RefreshingTableTestCase.printTableUpdates; import static io.deephaven.engine.testutil.testcase.RefreshingTableTestCase.simulateShiftAwareStep; import static io.deephaven.engine.util.TableTools.*; -import static io.deephaven.engine.testutil.TstUtils.*; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.*; public abstract class QueryTableWhereTest { private Logger log = LoggerFactory.getLogger(QueryTableWhereTest.class); @@ -96,7 +89,8 @@ public void testWhere() { assertTableEquals(whereResult, testRefreshingTable( i(2, 6).toTracking(), col("x", 1, 3), col("y", 'a', 'c'))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), col("x", 4, 5), col("y", 'd', 'e')); table.notifyListeners(i(7, 9), i(), i()); }); @@ -107,7 +101,7 @@ public void testWhere() { assertEquals(base.removed, i()); assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), col("x", 3, 10), col("y", 'e', 'd')); table.notifyListeners(i(), i(), i(7, 9)); }); @@ -119,7 +113,7 @@ public void testWhere() { assertEquals(base.removed, i(9)); assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2, 6, 7)); table.notifyListeners(i(), 
i(2, 6, 7), i()); }); @@ -130,7 +124,7 @@ public void testWhere() { assertEquals(base.removed, i(2, 6, 7)); assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(9)); addToTable(table, i(2, 4, 6), col("x", 1, 21, 3), col("y", 'a', 'x', 'c')); table.notifyListeners(i(2, 6), i(9), i(4)); @@ -174,7 +168,8 @@ public void testWhereOneOfTwo() { assertTableEquals(whereResult, testRefreshingTable( i(2, 6, 8).toTracking(), col("x", 1, 3, 4), col("y", 'a', 'c', 'f'))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), col("x", 4, 5), col("y", 'd', 'e')); table.notifyListeners(i(7, 9), i(), i()); }); @@ -187,7 +182,7 @@ public void testWhereOneOfTwo() { assertEquals(base.removed, i()); assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), col("x", 3, 10), col("y", 'e', 'd')); table.notifyListeners(i(), i(), i(7, 9)); }); @@ -201,7 +196,7 @@ public void testWhereOneOfTwo() { assertEquals(base.removed, i(9)); assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2, 6, 7)); table.notifyListeners(i(), i(2, 6, 7), i()); }); @@ -212,7 +207,7 @@ public void testWhereOneOfTwo() { assertEquals(base.removed, i(2, 6, 7)); assertEquals(base.modified, i()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(9)); addToTable(table, i(2, 4, 6), col("x", 1, 21, 3), col("y", 'a', 'x', 'c')); table.notifyListeners(i(2, 6), i(9), i(4)); @@ -249,64 +244,65 @@ public void testWhereInDependency() { final WhereFilter 
composedFilter = DisjunctiveFilter.makeDisjunctiveFilter(dynamicFilter1, dynamicFilter2); final Table composed = tableToFilter.where(composedFilter); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + TestCase.assertTrue(dynamicFilter1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(dynamicFilter2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(composed.satisfied(updateGraph.clock().currentStep())); }); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(setTable, i(103), col("A", 5), col("B", 8)); setTable.notifyListeners(i(103), i(), i()); - TestCase.assertFalse(setTable1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(setTable2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(setTable1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(setTable2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(dynamicFilter1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(dynamicFilter2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(composed.satisfied(updateGraph.clock().currentStep())); // this will do the notification for table; which should first fire the recorder for setTable1 - 
UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + updateGraph.flushOneNotificationForUnitTests(); // this will do the notification for table; which should first fire the recorder for setTable2 - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + updateGraph.flushOneNotificationForUnitTests(); // this will do the notification for table; which should first fire the merged listener for 1 - boolean flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + boolean flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); // to get table 1 satisfied we need to still fire a notification for the filter execution, then the combined // execution if (QueryTable.FORCE_PARALLEL_WHERE) { // the merged notification for table 2 goes first - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); log.debug().append("Flushing parallel notifications for setTable1").endl(); - TestCase.assertFalse(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(setTable1.satisfied(updateGraph.clock().currentStep())); // we need to flush our intermediate notification - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); // and our final notification - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); } - TestCase.assertTrue(setTable1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(setTable2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - 
TestCase.assertFalse(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(setTable1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(setTable2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(dynamicFilter1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(dynamicFilter2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(composed.satisfied(updateGraph.clock().currentStep())); if (!QueryTable.FORCE_PARALLEL_WHERE) { // the next notification should be the merged listener for setTable2 - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); } else { log.debug().append("Flushing parallel notifications for setTable2").endl(); // we need to flush our intermediate notification - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); // and our final notification - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); } @@ -314,63 +310,63 @@ public void testWhereInDependency() { // now we have the two set table's filtered we are ready to make sure nothing else is satisfied - TestCase.assertTrue(setTable1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(setTable2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(setTable1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(setTable2.satisfied(updateGraph.clock().currentStep())); + 
TestCase.assertFalse(dynamicFilter1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(dynamicFilter2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(composed.satisfied(updateGraph.clock().currentStep())); log.debug().append("Flushing DynamicFilter Notifications.").endl(); // the dynamicFilter1 updates - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue(setTable1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(setTable2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(setTable1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(setTable2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(dynamicFilter1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(dynamicFilter2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertFalse(composed.satisfied(updateGraph.clock().currentStep())); // the dynamicFilter2 updates - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); log.debug().append("Flushed DynamicFilter Notifications.").endl(); - TestCase.assertTrue(setTable1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(setTable2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); + 
TestCase.assertTrue(setTable1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(setTable2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(dynamicFilter1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(dynamicFilter2.satisfied(updateGraph.clock().currentStep())); log.debug().append("Checking Composed.").endl(); - TestCase.assertFalse(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(composed.satisfied(updateGraph.clock().currentStep())); // now that both filters are complete, we can run the merged listener - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); if (QueryTable.FORCE_PARALLEL_WHERE) { - TestCase.assertFalse(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(composed.satisfied(updateGraph.clock().currentStep())); // and the filter execution - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); // and the combination - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); } log.debug().append("Composed flushed.").endl(); - TestCase.assertTrue(setTable1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(setTable2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue(composed.satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(setTable1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(setTable2.satisfied(updateGraph.clock().currentStep())); + 
TestCase.assertTrue(dynamicFilter1.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(dynamicFilter2.satisfied(updateGraph.clock().currentStep())); + TestCase.assertTrue(composed.satisfied(updateGraph.clock().currentStep())); // and we are done - flushed = UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + flushed = updateGraph.flushOneNotificationForUnitTests(); TestCase.assertFalse(flushed); }); @@ -388,18 +384,19 @@ public void testWhereDynamicIn() { final QueryTable filteredTable = testRefreshingTable(i(1, 2, 3, 4, 5).toTracking(), col("X", "A", "B", "C", "D", "E")); - final Table result = - UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> filteredTable.whereIn(setTable, "X")); - final Table resultInverse = - UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> filteredTable.whereNotIn(setTable, "X")); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + + final Table result = updateGraph.exclusiveLock().computeLocked( + () -> filteredTable.whereIn(setTable, "X")); + final Table resultInverse = updateGraph.exclusiveLock().computeLocked( + () -> filteredTable.whereNotIn(setTable, "X")); show(result); assertEquals(3, result.size()); assertEquals(asList("A", "B", "C"), asList((String[]) DataAccessHelpers.getColumn(result, "X").getDirect())); assertEquals(2, resultInverse.size()); assertEquals(asList("D", "E"), asList((String[]) DataAccessHelpers.getColumn(resultInverse, "X").getDirect())); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(filteredTable, i(6), col("X", "A")); filteredTable.notifyListeners(i(6), i(), i()); }); @@ -410,7 +407,7 @@ public void testWhereDynamicIn() { assertEquals(2, resultInverse.size()); assertEquals(asList("D", "E"), asList((String[]) DataAccessHelpers.getColumn(resultInverse, "X").getDirect())); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> 
{ + updateGraph.runWithinUnitTestCycle(() -> { addToTable(setTable, i(7), col("X", "D")); setTable.notifyListeners(i(7), i(), i()); }); @@ -472,11 +469,12 @@ public void testWhereDynamicInIncremental() { EvalNugget.from(() -> filteredTable.whereNotIn(setTable, "floatCol")), }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < 100; step++) { final boolean modSet = random.nextInt(10) < 1; final boolean modFiltered = random.nextBoolean(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { if (modSet) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, setSize, random, setTable, setInfo); @@ -484,7 +482,7 @@ public void testWhereDynamicInIncremental() { }); validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { if (modFiltered) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, random, filteredTable, filteredInfo); @@ -498,20 +496,25 @@ public void testWhereDynamicInIncremental() { public void testWhereRefresh() { final Table t1 = TableTools.newTable(col("A", "b", "c", "d")); assertFalse(t1.isRefreshing()); - final Table t2 = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> t1.where("A in `b`")); + final Table t2 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(() -> t1.where("A in `b`")); assertFalse(t2.isRefreshing()); - final Table t3 = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> t1.whereIn(t1, "A")); + final Table t3 = + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> t1.whereIn(t1, "A")); assertFalse(t3.isRefreshing()); final Random random = new Random(0); final QueryTable t4 = getTable(10, random, initColumnInfos(new String[] {"B"}, new SetGenerator<>("a", "b"))); 
assertTrue(t4.isRefreshing()); - final Table t5 = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> t4.where("B in `b`")); + final Table t5 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(() -> t4.where("B in `b`")); assertTrue(t5.isRefreshing()); - final Table t6 = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> t4.whereIn(t1, "B=A")); + final Table t6 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(() -> t4.whereIn(t1, "B=A")); assertTrue(t6.isRefreshing()); - final Table t7 = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> t1.whereIn(t4, "A=B")); + final Table t7 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(() -> t1.whereIn(t4, "A=B")); assertTrue(t7.isRefreshing()); } @@ -528,20 +531,16 @@ public void testWhereInDiamond() { new IntGenerator(0, 100), new IntGenerator(0, 100))); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); final EvalNugget[] en = new EvalNugget[] { - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.whereIn(table.where("intCol % 25 == 0"), "intCol2=intCol")); - } - }, + EvalNugget.from(() -> updateGraph.exclusiveLock().computeLocked( + () -> table.whereIn(table.where("intCol % 25 == 0"), "intCol2=intCol"))), }; try { for (int step = 0; step < 1000; step++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( - () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - size, random, table, filteredInfo)); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, size, random, table, filteredInfo)); validate(en); } } catch (Exception e) { @@ -553,12 +552,13 @@ public Table e() { public void testWhereInDiamond2() { final QueryTable table = 
testRefreshingTable(i(1, 2, 3).toTracking(), col("x", 1, 2, 3), col("y", 2, 4, 6)); final Table setTable = table.where("x % 2 == 0").dropColumns("y"); - final Table filteredTable = - UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> table.whereIn(setTable, "y=x")); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Table filteredTable = updateGraph.exclusiveLock().computeLocked( + () -> table.whereIn(setTable, "y=x")); TableTools.show(filteredTable); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(4), col("x", 4), col("y", 8)); table.notifyListeners(i(4), i(), i()); }); @@ -603,43 +603,27 @@ public void testWhereOneOfIncremental() { new DoubleGenerator(0, 100))); final EvalNugget[] en = new EvalNugget[] { - new EvalNugget() { - public Table e() { - return filteredTable - .where(Filter.or(Filter.from("Sym in `aa`, `ee`", "intCol % 2 == 0"))); - } - }, - new EvalNugget() { - public Table e() { - return filteredTable.where(Filter.or( - Filter.and(Filter.from("intCol % 2 == 0", "intCol % 2 == 1")), - RawString.of("Sym in `aa`, `ee`"))); - } - }, - new EvalNugget() { - public Table e() { - return filteredTable.where(Filter.or( - Filter.and(Filter.from("intCol % 2 == 0", "Sym in `aa`, `ii`")), - RawString.of("Sym in `aa`, `ee`"))); - } - }, - new EvalNugget() { - public Table e() { - return filteredTable.where(Filter.or( - RawString.of("intCol % 2 == 0"), - RawString.of("intCol % 2 == 1"), - RawString.of("Sym in `aa`, `ee`"))); - } - }, + EvalNugget.from(() -> filteredTable.where( + Filter.or(Filter.from("Sym in `aa`, `ee`", "intCol % 2 == 0")))), + EvalNugget.from(() -> filteredTable.where(Filter.or( + Filter.and(Filter.from("intCol % 2 == 0", "intCol % 2 == 1")), + RawString.of("Sym in `aa`, `ee`")))), + EvalNugget.from(() -> filteredTable.where(Filter.or( + Filter.and(Filter.from("intCol % 2 == 0", "Sym in `aa`, `ii`")), 
+ RawString.of("Sym in `aa`, `ee`")))), + EvalNugget.from(() -> filteredTable.where(Filter.or( + RawString.of("intCol % 2 == 0"), + RawString.of("intCol % 2 == 1"), + RawString.of("Sym in `aa`, `ee`")))), }; try { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int i = 0; i < 100; i++) { log.debug().append("Step = " + i).endl(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( - () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - filteredSize, random, filteredTable, filteredInfo)); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, random, filteredTable, filteredInfo)); validate(en); } } catch (Exception e) { @@ -669,9 +653,10 @@ public void testWhereWithExcessiveShifting() { EvalNugget.from(() -> TableTools.merge(growingTable, m2).where("intCol % 3 == 0")), }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 1; ii < 100; ++ii) { final int fii = PRIME * ii; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(growingTable, i(fii), col("intCol", fii)); growingTable.notifyListeners(i(fii), i(), i()); GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, @@ -1151,8 +1136,9 @@ public void testBigTable() { final Table filtered = source.where(incrementalReleaseFilter); final Table result = filtered.where("A >= 6_000_000L", "A < 7_000_000L"); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (filtered.size() < source.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } assertEquals(1_000_000, result.size()); diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWouldMatchTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWouldMatchTest.java index 17c75330b81..7d99bc840ff 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWouldMatchTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/QueryTableWouldMatchTest.java @@ -3,21 +3,21 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.ShiftObliviousListener; -import io.deephaven.engine.testutil.*; -import io.deephaven.engine.testutil.generator.*; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.WouldMatchPair; import io.deephaven.engine.table.impl.select.DynamicWhereFilter; +import io.deephaven.engine.testutil.*; +import io.deephaven.engine.testutil.generator.*; import junit.framework.TestCase; import java.util.Arrays; import java.util.Random; +import static io.deephaven.engine.testutil.TstUtils.*; import static io.deephaven.engine.util.TableTools.col; import static io.deephaven.engine.util.TableTools.show; -import static io.deephaven.engine.testutil.TstUtils.*; public class QueryTableWouldMatchTest extends QueryTableTestBase { @@ -41,7 +41,8 @@ public void testMatch() { Arrays.asList(DataAccessHelpers.getColumn(t1Matched, "Compound").get(0, 6))); // Add - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(t1, i(7, 9), col("Text", "Cake", "Zips For Fun"), col("Number", 6, 1), col("Bool", false, false)); @@ -59,7 +60,7 @@ public void testMatch() { Arrays.asList(DataAccessHelpers.getColumn(t1Matched, "Compound").get(0, 8))); // Remove - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { removeRows(t1, i(1, 3)); t1.notifyListeners(i(), i(1, 3), i()); }); @@ -75,7 +76,7 @@ public void testMatch() { Arrays.asList(DataAccessHelpers.getColumn(t1Matched, "Compound").get(0, 8))); // Modify - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(t1, i(4, 5), col("Text", "Kittie", "Bacon"), col("Number", 2, 1), @@ -94,7 +95,7 @@ public void testMatch() { Arrays.asList(DataAccessHelpers.getColumn(t1Matched, "Compound").get(0, 8))); // All 3 - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(t1, i(0, 1, 4, 11), col("Text", "Apple", "Bagel", "Boat", "YAY"), col("Number", 100, -200, 300, 400), @@ -143,7 +144,8 @@ private void doTestMatchRefilter(boolean isRefreshing) { Arrays.asList(DataAccessHelpers.getColumn(t1Matched, "InNum").get(0, 6))); // Tick one filter table - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(textTable, i(0, 2), col("Text", "Cheese", "Yo")); textTable.notifyListeners(i(2), i(), i(0)); }); @@ -154,7 +156,7 @@ private void doTestMatchRefilter(boolean isRefreshing) { Arrays.asList(DataAccessHelpers.getColumn(t1Matched, "InNum").get(0, 6))); // Tick both of them - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(textTable, i(0, 2), col("Text", "Lets go", "Hey")); textTable.notifyListeners(i(), i(), i(0, 2)); @@ -170,7 +172,7 @@ private void doTestMatchRefilter(boolean isRefreshing) { if (isRefreshing) { // Tick both of them, and the table itself - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(textTable, i(0, 2), col("Text", "Dog", "Yo")); textTable.notifyListeners(i(), i(), 
i(0, 2)); @@ -269,10 +271,11 @@ public void testMatchDynamicIterative() { getTable(setSize, random, numSetInfo = initColumnInfos(new String[] {"intCol"}, new IntGenerator(0, 100))); - final QueryTable symSetTable = (QueryTable) UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> symSetTableBase.selectDistinct("Sym")); - final QueryTable numSetTable = (QueryTable) UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> numSetTableBase.selectDistinct("intCol")); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final QueryTable symSetTable = (QueryTable) updateGraph.exclusiveLock().computeLocked( + () -> symSetTableBase.selectDistinct("Sym")); + final QueryTable numSetTable = (QueryTable) updateGraph.exclusiveLock().computeLocked( + () -> numSetTableBase.selectDistinct("intCol")); final QueryTable matchTable = getTable(filteredSize, random, filteredInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, @@ -296,7 +299,7 @@ public void testMatchDynamicIterative() { final boolean modFiltered = random.nextBoolean(); final int doit = i & 0x3; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { if (modSet) { if (doit == 0 || doit == 2) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, @@ -311,7 +314,7 @@ public void testMatchDynamicIterative() { }); validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { if (modFiltered) { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, random, matchTable, filteredInfo); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/SelectOverheadLimiter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/SelectOverheadLimiter.java index 36cf441d444..e640f9af3d4 100644 --- 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/SelectOverheadLimiter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/SelectOverheadLimiter.java @@ -12,7 +12,6 @@ import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableUpdate; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.engine.table.impl.sources.SwitchColumnSource; import io.deephaven.engine.table.impl.sources.sparse.SparseConstants; @@ -98,7 +97,7 @@ public static Table clampSelectOverhead(Table input, double permittedOverhead) { return input.flatten(); } - UpdateGraphProcessor.DEFAULT.checkInitiateTableOperation(); + input.getUpdateGraph().checkInitiateSerialTableOperation(); // now we know we are refreshing, so should update our overhead structure final OverheadTracker overheadTracker = new OverheadTracker(); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ShiftedColumnOperationTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ShiftedColumnOperationTest.java index fe50f9c105a..eaa719c1938 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ShiftedColumnOperationTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ShiftedColumnOperationTest.java @@ -8,6 +8,7 @@ import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.chunk.util.hashing.LongChunkEquals; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -25,13 +26,13 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.util.ChunkUtils; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import 
io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.PrintListener; import io.deephaven.engine.util.TableTools; import io.deephaven.test.types.OutOfBandTest; @@ -96,7 +97,8 @@ private void testSimpleMatchPair(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(6, 8, 22, 24), intCol("Sentinel", 4, 5, 12, 13), intCol("Value", 16, 18, 32, 34), intCol("Value2", 162, 182, 322, 342)); table.notifyListeners(i(6, 8, 22, 24), i(), i()); @@ -106,15 +108,15 @@ private void testSimpleMatchPair(final int shiftConst) { String[] minusConstCols = new String[] {"CV3=Value_[i-" + shiftConst + "]", "CV32=Value2_[i-" + shiftConst + "]"}; - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(minusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(minusConstCols)), shiftMinusConst); String[] plusConstCols = new String[] {"CV2=Value_[i+" + shiftConst + "]", "CV22=Value2_[i+" + shiftConst + "]"}; - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(plusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(plusConstCols)), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2, 4, 26, 28), intCol("Sentinel", 2, 3, 14, 15), intCol("Value", 12, 14, 36, 38), intCol("Value2", 122, 142, 362, 382)); removeRows(table, i(6, 24)); @@ -122,9 +124,9 @@ private void testSimpleMatchPair(final int shiftConst) { }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(minusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(minusConstCols)), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(plusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(plusConstCols)), shiftPlusConst); plTable.stop(); @@ -168,7 +170,8 @@ private void testSimpleFillChunkSingleColumnChanges(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(0, 1, 12, 14), intCol("Sentinel", -1, 0, 6, 7), intCol("Value", 6, 8, 20, 22)); addToTable(table, i(8), intCol("Sentinel", 18), intCol("Value", 25)); table.notifyListeners(i(0, 1, 12, 14), i(), i(8)); @@ -176,10 +179,10 @@ private void testSimpleFillChunkSingleColumnChanges(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " 
+ shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -226,7 +229,8 @@ private void testModifiedColSet(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(10, 12, 18), intCol("Sentinel", 6, 7, 10), @@ -234,26 +238,26 @@ private void testModifiedColSet(final int shiftConst) { intCol("Value2", 202, 222, 282), intCol("Value3", 2020, 2220, 2820)); - final RowSetShiftData.Builder shiftDataBuilder = new RowSetShiftData.Builder(); - final RowSetShiftData shiftData = shiftDataBuilder.build(); - final TableUpdateImpl update = new TableUpdateImpl(i(), i(), i(10, 12, 18), shiftData, + final RowSetShiftData.Builder shiftDataBuilder1 = new RowSetShiftData.Builder(); + final RowSetShiftData shiftData1 = shiftDataBuilder1.build(); + final TableUpdateImpl update1 = new TableUpdateImpl(i(), i(), i(10, 12, 18), shiftData1, table.newModifiedColumnSet("Value")); - table.notifyListeners(update); + table.notifyListeners(update1); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); String[] minusConstCols = new String[] {"CV3=Value_[i-" + shiftConst + "]", "CV32=Value2_[i-" + shiftConst + "]", "CV33=Value3_[i-" + shiftConst + "]"}; - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(minusConstCols)), + 
assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(minusConstCols)), shiftMinusConst); String[] plusConstCols = new String[] {"CV2=Value_[i+" + shiftConst + "]", "CV22=Value2_[i+" + shiftConst + "]", "CV23=Value3_[i+" + shiftConst + "]"}; - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(plusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(plusConstCols)), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(14, 16, 20), intCol("Sentinel", 8, 9, 11), intCol("Value", 24, 26, 30), @@ -267,9 +271,9 @@ private void testModifiedColSet(final int shiftConst) { }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(minusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(minusConstCols)), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(plusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(plusConstCols)), shiftPlusConst); plTable.stop(); @@ -318,7 +322,8 @@ private void testModifiedColSetMultiColMapping(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(10, 12, 18), intCol("Sentinel", 6, 7, 10), @@ -326,11 +331,11 @@ private void testModifiedColSetMultiColMapping(final int shiftConst) { intCol("Value2", 202, 222, 282), intCol("Value3", 2020, 
2220, 2820)); - final RowSetShiftData.Builder shiftDataBuilder = new RowSetShiftData.Builder(); - final RowSetShiftData shiftData = shiftDataBuilder.build(); - final TableUpdateImpl update = new TableUpdateImpl(i(), i(), i(10, 12, 18), shiftData, + final RowSetShiftData.Builder shiftDataBuilder1 = new RowSetShiftData.Builder(); + final RowSetShiftData shiftData1 = shiftDataBuilder1.build(); + final TableUpdateImpl update1 = new TableUpdateImpl(i(), i(), i(10, 12, 18), shiftData1, table.newModifiedColumnSet("Value")); - table.notifyListeners(update); + table.notifyListeners(update1); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); @@ -339,17 +344,17 @@ private void testModifiedColSetMultiColMapping(final int shiftConst) { "CV3=Value_[i-" + shiftConst + "]", "C2V3=Value_[i-" + shiftConst + "]", "CV32=Value2_[i-" + shiftConst + "]", "C2V32=Value2_[i-" + shiftConst + "]", "CV33=Value3_[i-" + shiftConst + "]", "C2V33=Value3_[i-" + shiftConst + "]"}; - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(minusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(minusConstCols)), shiftMinusConst); String[] plusConstCols = new String[] { "CV2=Value_[i+" + shiftConst + "]", "C2V2=Value_[i+" + shiftConst + "]", "CV22=Value2_[i+" + shiftConst + "]", "C2V22=Value2_[i+" + shiftConst + "]", "CV23=Value3_[i+" + shiftConst + "]", "C2V23=Value3_[i+" + shiftConst + "]"}; - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(plusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(plusConstCols)), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(14, 16, 20), intCol("Sentinel", 8, 9, 11), intCol("Value", 24, 26, 30), @@ -363,9 +368,9 @@ private void testModifiedColSetMultiColMapping(final int 
shiftConst) { }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(minusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(minusConstCols)), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update(plusConstCols)), + assertTableEquals(updateGraph.sharedLock().computeLocked(() -> table.update(plusConstCols)), shiftPlusConst); plTable.stop(); @@ -409,7 +414,8 @@ private void testSimple5(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(6, 8, 22, 24), intCol("Sentinel", 4, 5, 12, 13), intCol("Value", 16, 18, 32, 34)); table.notifyListeners(i(6, 8, 22, 24), i(), i()); @@ -417,12 +423,12 @@ private void testSimple5(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2, 4, 26, 28), intCol("Sentinel", 2, 3, 14, 15), intCol("Value", 12, 14, 36, 38)); removeRows(table, i(6, 24)); table.notifyListeners(i(2, 4, 26, 28), i(6, 24), i()); @@ -430,10 +436,10 @@ private void testSimple5(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -476,7 +482,8 @@ private void testAddRemoveAtTheEnd(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(6, 8, 22, 24), intCol("Sentinel", 4, 5, 12, 13), intCol("Value", 16, 18, 32, 34)); table.notifyListeners(i(6, 8, 22, 24), i(), i()); @@ -484,13 +491,13 @@ private void testAddRemoveAtTheEnd(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - 
assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2, 4, 26, 28), intCol("Sentinel", 2, 3, 14, 15), intCol("Value", 12, 14, 36, 38)); removeRows(table, i(6, 24)); table.notifyListeners(i(2, 4, 26, 28), i(6, 24), i()); @@ -498,10 +505,10 @@ private void testAddRemoveAtTheEnd(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -544,7 +551,8 @@ private void testAddRemoveOverlap(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { 
addToTable(table, i(6, 14, 18, 24), intCol("Sentinel", 4, 5, 12, 13), intCol("Value", 16, 18, 32, 34)); removeRows(table, i(10, 14, 18, 20)); table.notifyListeners(i(6, 24), i(10, 14, 18, 20), i()); @@ -553,10 +561,10 @@ private void testAddRemoveOverlap(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -611,7 +619,8 @@ public void testAddRemove() { FailureListener tuvPlusConstFailureListener = new FailureListener(); tuvMPlusConst.getResultTable().addUpdateListener(tuvPlusConstFailureListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(6, 8, 22, 24), intCol("Sentinel", 4, 5, 12, 13), intCol("Value", 16, 18, 32, 34)); table.notifyListeners(i(6, 8, 22, 24), i(), i()); @@ -627,18 +636,16 @@ public void testAddRemove() { System.out.println("---shiftPlusConst---"); TableTools.showWithRowSet(shiftPlusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); - 
assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2, 4, 26, 28), intCol("Sentinel", 2, 3, 14, 15), intCol("Value", 12, 14, 36, 38)); removeRows(table, i(6, 24)); table.notifyListeners(i(2, 4, 26, 28), i(6, 24), i()); @@ -654,16 +661,14 @@ public void testAddRemove() { System.out.println("---shiftPlusConst---"); TableTools.showWithRowSet(shiftPlusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + 
assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); } @Test @@ -714,7 +719,8 @@ public void testContiguousAddUpdateRemove() { FailureListener tuvPlusConstFailureListener = new FailureListener(); tuvMPlusConst.getResultTable().addUpdateListener(tuvPlusConstFailureListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(4, 6, 8, 14, 16, 18, 22, 24, 26, 32, 34, 36), intCol("Sentinel", 1, 2, 3, 6, 7, 8, 10, 11, 12, 15, 16, 17), intCol("Value", 12, 14, 16, 22, 24, 26, 30, 32, 34, 40, 42, 44)); @@ -732,18 +738,16 @@ public void testContiguousAddUpdateRemove() { System.out.println("---shiftPlusConst---"); TableTools.showWithRowSet(shiftPlusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), 
shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(4, 6, 8, 14, 16, 18, 22, 24, 26, 32, 34, 36), intCol("Sentinel", 101, 102, 103, 106, 107, 108, 110, 111, 112, 115, 116, 117), intCol("Value", 112, 114, 116, 122, 124, 126, 130, 132, 134, 140, 142, 144)); @@ -761,18 +765,16 @@ public void testContiguousAddUpdateRemove() { System.out.println("---shiftPlusConst---"); TableTools.showWithRowSet(shiftPlusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() 
-> { removeRows(table, i(4, 6, 8, 14, 16, 18, 22, 24, 26, 32, 34, 36)); table.notifyListeners(i(), i(4, 6, 8, 14, 16, 18, 22, 24, 26, 32, 34, 36), i()); }); @@ -787,16 +789,14 @@ public void testContiguousAddUpdateRemove() { System.out.println("---shiftPlusConst---"); TableTools.showWithRowSet(shiftPlusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); } @Test @@ -836,16 +836,17 @@ private void testSimpleRemoveRangeApproach(final int shiftConst) { final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(6, 10)); table.notifyListeners(i(), i(6, 10), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - 
assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -887,16 +888,17 @@ private void testSimpleRemoveChanges(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(6)); table.notifyListeners(i(), i(6), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -939,7 +941,8 @@ private void testSimpleModifyChanges(final int shiftConst) { final PrintListener plMinusConst = new 
PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(0, 2, 4, 8, 12, 14, 18), intCol("Sentinel", 11, 12, 13, 15, 17, 18, 20), intCol("Value", 9, 11, 13, 17, 21, 23, 27)); table.notifyListeners(i(), i(), i(0, 2, 4, 8, 12, 14, 18)); @@ -947,12 +950,12 @@ private void testSimpleModifyChanges(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(0, 2, 4, 8, 12, 14, 18), intCol("Sentinel", 21, 22, 23, 25, 27, 28, 30), intCol("Value", 109, 111, 113, 117, 121, 123, 127)); final RowSetShiftData.Builder shiftDataBuilder = new RowSetShiftData.Builder(); @@ -964,10 +967,10 @@ private void testSimpleModifyChanges(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - 
assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1044,7 +1047,8 @@ public void testSimpleAddSingleColumnChanges() { final PrintListener printListener = new PrintListener("shiftMinus4", shiftMinusConst, 10); final PrintListener printListenerPlusConst = new PrintListener("shiftPlus4", shiftPlusConst, 10); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(0, 1, 12, 14), intCol("Sentinel", -1, 0, 6, 7), intCol("Value", 98, 99, 105, 106)); table.notifyListeners(i(0, 1, 12, 14), i(), i()); }); @@ -1059,16 +1063,14 @@ public void testSimpleAddSingleColumnChanges() { System.out.println("---shiftPlusConst---"); TableTools.showWithRowSet(shiftPlusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + 
assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); printListenerOrig.stop(); printListener.stop(); @@ -1153,7 +1155,8 @@ public void testSimpleAddMultipleColumnChanges() { final PrintListener printListener = new PrintListener("shiftMinus4", shiftMinusConst, 10); final PrintListener printListenerPlusConst = new PrintListener("shiftPlus4", shiftPlusConst, 10); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(0, 1, 12, 14), intCol("Sentinel", -1, 0, 6, 7), intCol("Value", 98, 99, 105, 106), intCol("Value2", 980, 990, 1050, 1060), intCol("Value3", 1980, 1990, 2050, 2060)); table.notifyListeners(i(0, 1, 12, 14), i(), i()); @@ -1169,23 +1172,19 @@ public void testSimpleAddMultipleColumnChanges() { System.out.println("---shiftPlusConst---"); TableTools.showWithRowSet(shiftPlusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( - () -> table.update("V3 = Value_[i - 1]", "V32 = Value2_[i - 1]", "V33 = Value3_[i - 1]")), + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]", "V32 = Value2_[i - 1]", "V33 = Value3_[i - 1]")), shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( - () -> table.update("V2 = Value_[i + 1]", "V22 = Value2_[i + 1]", "V23 = Value3_[i + 1]")), + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]", "V22 = Value2_[i + 1]", "V23 = Value3_[i + 1]")), shiftPlusOne); - 
assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]", - "CV32 = Value2_[i - " + shiftConst + "]", "CV33 = Value3_[i - " + shiftConst + "]")), + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]", + "CV32 = Value2_[i - " + shiftConst + "]", "CV33 = Value3_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]", - "CV22 = Value2_[i + " + shiftConst + "]", "CV23 = Value3_[i + " + shiftConst + "]")), + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]", + "CV22 = Value2_[i + " + shiftConst + "]", "CV23 = Value3_[i + " + shiftConst + "]")), shiftPlusConst); printListenerOrig.stop(); @@ -1231,16 +1230,17 @@ private void testRemoveWithTUV(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(8)); table.notifyListeners(i(), i(8), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); 
+ assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1283,17 +1283,18 @@ private void testAddSingleNewRowToBeginning(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2), intCol("Sentinel", 1), intCol("Value", 10)); table.notifyListeners(i(2), i(), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1336,17 +1337,18 @@ private void testAddMultipleNewRowToBeginning(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2, 4), intCol("Sentinel", 1, 2), 
intCol("Value", 10, 12)); table.notifyListeners(i(2, 4), i(), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1389,17 +1391,18 @@ private void testAddSingleNewRowToEnd(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(12), intCol("Sentinel", 6), intCol("Value", 20)); table.notifyListeners(i(12), i(), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = 
Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1442,17 +1445,18 @@ private void testAddMultipleNewRowsToEnd(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(12, 14, 16), intCol("Sentinel", 6, 7, 8), intCol("Value", 20, 22, 24)); table.notifyListeners(i(12, 14, 16), i(), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1495,17 +1499,18 @@ private void testAddMultipleNonContiguousRows(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(3, 5, 7), intCol("Sentinel", 2, 4, 6), intCol("Value", 11, 13, 15)); 
table.notifyListeners(i(7, 5, 3), i(), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1548,7 +1553,8 @@ private void testAddMultipleContiguousNewRows(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(3, 5, 7, 9, 11, 13), intCol("Sentinel", 2, 4, 6, 8, 10, 12), intCol("Value", 11, 13, 15, 17, 19, 21)); table.notifyListeners(i(7, 5, 3, 9, 11, 13), i(), i()); @@ -1556,10 +1562,10 @@ private void testAddMultipleContiguousNewRows(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = 
Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1602,17 +1608,18 @@ private void testAddRowsToBeginningMiddle(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2, 4, 8, 10), intCol("Sentinel", 1, 2, 4, 5), intCol("Value", 10, 12, 16, 18)); table.notifyListeners(i(2, 4, 8, 10), i(), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1654,17 +1661,18 @@ private void testAddRowsToBeginningMiddleAndEnd(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + 
updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2, 4, 8, 10, 16, 18), intCol("Sentinel", 1, 2, 4, 5, 8, 9), intCol("Value", 10, 12, 16, 18, 24, 26)); table.notifyListeners(i(2, 4, 8, 10, 16, 18), i(), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1699,18 +1707,17 @@ public void testModificationNoShift() { final PrintListener plMinusOne = new PrintListener("Minus One", shiftMinusOne); final PrintListener plPlusOne = new PrintListener("Plus One", shiftPlusOne); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(6), intCol("Sentinel", 6), intCol("Value", 20)); table.notifyListeners(i(), i(), i(6)); }); printTableUpdates(table, shiftMinusOne, shiftPlusOne, "post-update", 1); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + 
assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); plTable.stop(); plMinusOne.stop(); @@ -1746,7 +1753,8 @@ public void testShiftOnly() { final PrintListener plMinusOne = new PrintListener("Minus One", shiftMinusOne); final PrintListener plPlusOne = new PrintListener("Plus One", shiftPlusOne); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(6)); addToTable(table, i(7), intCol("Sentinel", 3), intCol("Value", 14)); final RowSetShiftData.Builder shiftDataBuilder = new RowSetShiftData.Builder(); @@ -1758,12 +1766,10 @@ public void testShiftOnly() { }); printTableUpdates(table, shiftMinusOne, shiftPlusOne, "post-update", 1); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); plTable.stop(); plMinusOne.stop(); @@ -1799,7 +1805,8 @@ public void testShiftAndModify() { final PrintListener plMinusOne = new PrintListener("Minus One", shiftMinusOne); final PrintListener plPlusOne = new PrintListener("Plus One", shiftPlusOne); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(6, 10)); addToTable(table, i(7), intCol("Sentinel", 3), intCol("Value", 16)); addToTable(table, i(9), intCol("Sentinel", 5), 
intCol("Value", 19)); @@ -1813,12 +1820,10 @@ public void testShiftAndModify() { }); printTableUpdates(table, shiftMinusOne, shiftPlusOne, "post-update", 1); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); plTable.stop(); plMinusOne.stop(); @@ -1854,7 +1859,8 @@ public void testAddAndShift() { final PrintListener plMinusOne = new PrintListener("Minus One", shiftMinusOne); final PrintListener plPlusOne = new PrintListener("Plus One", shiftPlusOne); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(4)); addToTable(table, i(3), intCol("Sentinel", 3), intCol("Value", 11)); addToTable(table, i(5), intCol("Sentinel", 5), intCol("Value", 14)); @@ -1868,12 +1874,10 @@ public void testAddAndShift() { printTableUpdates(table, shiftMinusOne, shiftPlusOne, "post-update", 1); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V3 = Value_[i - 1]")), - shiftMinusOne); - assertTableEquals( - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("V2 = Value_[i + 1]")), - shiftPlusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V3 = Value_[i - 1]")), shiftMinusOne); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("V2 = Value_[i + 1]")), shiftPlusOne); plTable.stop(); plMinusOne.stop(); @@ -1916,7 +1920,8 @@ 
private void testAddAndUpdateWithShift(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(9, 18)); addToTable(table, i(8), intCol("Sentinel", 4), intCol("Value", 17)); addToTable(table, i(10), intCol("Sentinel", 5), intCol("Value", 18)); @@ -1933,10 +1938,10 @@ private void testAddAndUpdateWithShift(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -1979,7 +1984,8 @@ private void testAddAndUpdateWithOutShift(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(8), intCol("Sentinel", 4), intCol("Value", 17)); addToTable(table, i(9), intCol("Sentinel", 6), 
intCol("Value", 19)); addToTable(table, i(18), intCol("Sentinel", 10), intCol("Value", 29)); @@ -1989,10 +1995,10 @@ private void testAddAndUpdateWithOutShift(final int shiftConst) { printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2035,17 +2041,18 @@ private void testRemoveSingleMiddleRow(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(6)); table.notifyListeners(i(), i(6), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + 
assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2088,16 +2095,17 @@ private void testRemoveFirstRow(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2)); table.notifyListeners(i(), i(2), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2140,16 +2148,17 @@ private void testRemoveFirstTwoRows(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2, 4)); table.notifyListeners(i(), i(2, 4), i()); }); printTableUpdates(table, shiftMinusConst, 
shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2192,17 +2201,18 @@ private void testRemoveLastRow(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(14)); table.notifyListeners(i(), i(14), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2245,16 +2255,17 @@ private void testRemoveLastTwoRows(final int 
shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(12, 14)); table.notifyListeners(i(), i(12, 14), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2297,17 +2308,18 @@ private void testRemoveRandomNonFirstAndLastRows(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(4, 8, 12)); table.notifyListeners(i(), i(4, 8, 12), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - 
assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2350,16 +2362,17 @@ private void testRemoveFirstLastAndRandomRows(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2, 4, 8, 12, 14)); table.notifyListeners(i(), i(2, 4, 8, 12, 14), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2402,17 +2415,18 @@ private void testRemoveFirstLastAndRandomRowsWithMoreData(final int shiftConst) final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new 
PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2, 4, 8, 12, 14)); table.notifyListeners(i(), i(2, 4, 8, 12, 14), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2455,17 +2469,18 @@ private void testRemoveAllRows(final int shiftConst) { final PrintListener plMinusConst = new PrintListener("Minus Const", shiftMinusConst); final PrintListener plPlusConst = new PrintListener("Plus Const", shiftPlusConst); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2, 4, 6, 8, 10, 12, 14)); table.notifyListeners(i(), i(2, 4, 6, 8, 10, 12, 14), i()); }); printTableUpdates(table, shiftMinusConst, shiftPlusConst, "post-update", shiftConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.update("CV2 = Value_[i + " + 
shiftConst + "]")), shiftPlusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV3 = Value_[i - " + shiftConst + "]")), shiftMinusConst); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("CV2 = Value_[i + " + shiftConst + "]")), shiftPlusConst); plTable.stop(); plMinusConst.stop(); @@ -2506,16 +2521,18 @@ public void testBigShift() { i(2, 4, 6, 8).toTracking(), intCol("Sentinel", 100, 101, 102, 103), intCol("Value", 201, 202, 203, 204)); - final Table shifted = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> ShiftedColumnOperation.addShiftedColumns(table, -1, "VS=Value")); + + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Table shifted = updateGraph.sharedLock().computeLocked( + () -> ShiftedColumnOperation.addShiftedColumns(table, -1, "VS=Value")); TableTools.showWithRowSet(shifted); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("VS=Value_[i-1]")), - shifted); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("VS=Value_[i-1]")), shifted); final PrintListener pl = new PrintListener("Shifted Result", shifted, 10); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table, i(2, 4, 6, 8)); addToTable(table, i(1002, 1003, 1004, 1008), intCol("Sentinel", 100, 104, 101, 103), intCol("Value", 201, 205, 202, 204)); @@ -2532,8 +2549,8 @@ public void testBigShift() { }); TableTools.showWithRowSet(shifted); - assertTableEquals(UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> table.update("VS=Value_[i-1]")), - shifted); + assertTableEquals(updateGraph.sharedLock().computeLocked( + () -> table.update("VS=Value_[i-1]")), shifted); pl.stop(); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/SparseSelectTest.java 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/SparseSelectTest.java index 3cb99270f1b..09d63adee25 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/SparseSelectTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/SparseSelectTest.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -12,7 +13,6 @@ import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.*; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.test.types.OutOfBandTest; @@ -186,7 +186,8 @@ public void testSparseSelectReuse() { assertTableEquals(selected, table); assertTableEquals(TstUtils.prevTable(selected), TstUtils.prevTable(table)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(2), TableTools.longCol("Value", 3)); table.notifyListeners(i(2), i(), i()); }); @@ -197,7 +198,7 @@ public void testSparseSelectReuse() { TableTools.show(table); TableTools.show(selected); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(1L << 20 + 2), TableTools.longCol("Value", 4)); table.notifyListeners(i(1L << 20 + 2), i(), i()); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestAggBy.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestAggBy.java index b212c0ee445..d7749be1124 100644 --- 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestAggBy.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestAggBy.java @@ -21,7 +21,6 @@ import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.*; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -405,12 +404,13 @@ public void testComboByDistinct() { assertArrayEquals(new char[] {'c'}, cs.get(2).toArray()); assertArrayEquals(new char[] {'d'}, cs.get(3).toArray()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet toAdd = i(4, 5, 6, 7); - addToTable(dataTable, toAdd, + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet toAdd2 = i(4, 5, 6, 7); + addToTable(dataTable, toAdd2, intCol("Grp", 1, 2, 3, 4), charCol("Let", 'e', 'f', 'g', 'h')); - dataTable.notifyListeners(toAdd, i(), i()); + dataTable.notifyListeners(toAdd2, i(), i()); }); assertEquals(4, result.size()); assertArrayEquals(new char[] {'a', 'e'}, cs.get(0).toArray()); @@ -418,19 +418,19 @@ public void testComboByDistinct() { assertArrayEquals(new char[] {'c', 'g'}, cs.get(2).toArray()); assertArrayEquals(new char[] {'d', 'h'}, cs.get(3).toArray()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet toAdd = i(8, 9, 10, 11); - addToTable(dataTable, toAdd, + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet toAdd1 = i(8, 9, 10, 11); + addToTable(dataTable, toAdd1, intCol("Grp", 1, 2, 3, 4), charCol("Let", 'i', 'j', 'k', 'l')); - dataTable.notifyListeners(toAdd, i(), i()); + dataTable.notifyListeners(toAdd1, i(), i()); }); assertArrayEquals(new char[] {'e', 'i'}, cs.get(0).toArray()); assertArrayEquals(new char[] {'f', 'j'}, 
cs.get(1).toArray()); assertArrayEquals(new char[] {'c', 'g', 'k'}, cs.get(2).toArray()); assertArrayEquals(new char[] {'d', 'h', 'l'}, cs.get(3).toArray()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet toAdd = i(12, 13, 14, 15); addToTable(dataTable, toAdd, intCol("Grp", 1, 2, 3, 4), @@ -442,7 +442,7 @@ public void testComboByDistinct() { assertArrayEquals(new char[] {'g', 'k', 'o'}, cs.get(2).toArray()); assertArrayEquals(new char[] {'h', 'l', 'p'}, cs.get(3).toArray()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(16), intCol("Grp", 1), charCol("Let", 'q')); dataTable.notifyListeners(i(16), i(), i()); }); @@ -451,7 +451,7 @@ public void testComboByDistinct() { assertArrayEquals(new char[] {'k', 'o'}, cs.get(2).toArray()); assertArrayEquals(new char[] {'h', 'l', 'p'}, cs.get(3).toArray()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(17), intCol("Grp", 2), charCol("Let", 'r')); dataTable.notifyListeners(i(17), i(), i()); }); @@ -460,7 +460,7 @@ public void testComboByDistinct() { assertArrayEquals(new char[] {'k', 'o'}, cs.get(2).toArray()); assertArrayEquals(new char[] {'l', 'p'}, cs.get(3).toArray()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(18), intCol("Grp", 3), charCol("Let", 's')); dataTable.notifyListeners(i(18), i(), i()); }); @@ -469,7 +469,7 @@ public void testComboByDistinct() { assertArrayEquals(new char[] {'k', 'o', 's'}, cs.get(2).toArray()); assertArrayEquals(new char[] {'l', 'p'}, cs.get(3).toArray()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(19), intCol("Grp", 4), charCol("Let", 't')); dataTable.notifyListeners(i(19), i(), 
i()); }); @@ -494,7 +494,8 @@ public void testComboByCountDistinct() { assertArrayEquals(new Object[] {"VXX", 1L, 1L}, DataAccessHelpers.getRecord(result, 3)); assertTableEquals(result, countNulls); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(1, 10), col("USym", "AAPL", "VXX"), longCol("Account", QueryConstants.NULL_LONG, 1), @@ -508,7 +509,7 @@ public void testComboByCountDistinct() { assertArrayEquals(new Object[] {"AAPL", 3L, 2L}, DataAccessHelpers.getRecord(countNulls, 0)); assertArrayEquals(new Object[] {"VXX", 2L, 2L}, DataAccessHelpers.getRecord(countNulls, 3)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(2), col("USym", "AAPL"), longCol("Account", QueryConstants.NULL_LONG), @@ -521,7 +522,7 @@ public void testComboByCountDistinct() { TableTools.showWithRowSet(dataTable, dataTable.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(1, 2, 11), col("USym", "AAPL", "AAPL", "SPY"), longCol("Account", 1, 2, QueryConstants.NULL_LONG), @@ -563,7 +564,8 @@ public void testComboByAggUnique() { assertArrayEquals(new Object[] {"VXX", 5L, 50, null}, DataAccessHelpers.getRecord(result, 3)); assertTableEquals(result, countNulls); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(2, 10), col("USym", "AAPL", "VXX"), longCol("Account", 1, 5), @@ -580,7 +582,7 @@ public void testComboByAggUnique() { // Check the nulls table assertArrayEquals(new Object[] {"VXX", 5L, -1, null}, DataAccessHelpers.getRecord(countNulls, 3)); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(11), col("USym", "USO"), longCol("Account", 2), @@ -598,7 +600,7 @@ public void testComboByAggUnique() { assertArrayEquals(new Object[] {"AAPL", 1L, 100, dtDefault}, DataAccessHelpers.getRecord(countNulls, 0)); // - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(11), col("USym", "USO"), longCol("Account", QueryConstants.NULL_LONG), @@ -609,7 +611,7 @@ public void testComboByAggUnique() { assertArrayEquals(new Object[] {"USO", null, null, dt2}, DataAccessHelpers.getRecord(result, 3)); // - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(3, 4, 9, 10), col("USym", "GOOG", "GOOG", "VXX", "VXX"), longCol("Account", 2L, 2L, QueryConstants.NULL_LONG, 99), diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestBlinkTableTools.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestBlinkTableTools.java index f3d689b42e4..a171f2640e9 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestBlinkTableTools.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestBlinkTableTools.java @@ -3,16 +3,17 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import junit.framework.TestCase; -import org.junit.After; -import org.junit.Before; +import 
org.junit.Rule; import org.junit.Test; import java.time.Instant; @@ -22,16 +23,9 @@ import static io.deephaven.engine.testutil.TstUtils.i; public class TestBlinkTableTools { - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } + @Rule + public final EngineCleanup base = new EngineCleanup(); @Test public void testBlinkToAppendOnlyTable() { @@ -49,18 +43,19 @@ public void testBlinkToAppendOnlyTable() { TestCase.assertEquals(true, appendOnly.getAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE)); TestCase.assertTrue(appendOnly.isFlat()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - RowSet removed = blinkTable.getRowSet().copyPrev(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + RowSet removed1 = blinkTable.getRowSet().copyPrev(); ((WritableRowSet) blinkTable.getRowSet()).clear(); TstUtils.addToTable(blinkTable, i(7), intCol("I", 1), doubleCol("D", Math.PI), instantCol("DT", dt2), col("B", true)); - blinkTable.notifyListeners(i(7), removed, i()); + blinkTable.notifyListeners(i(7), removed1, i()); }); assertTableEquals(TableTools.newTable(intCol("I", 7, 1), doubleCol("D", Double.NEGATIVE_INFINITY, Math.PI), instantCol("DT", dt1, dt2), col("B", true, true)), appendOnly); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { RowSet removed = blinkTable.getRowSet().copyPrev(); ((WritableRowSet) blinkTable.getRowSet()).clear(); TstUtils.addToTable(blinkTable, i(7), intCol("I", 2), doubleCol("D", Math.E), instantCol("DT", dt3), @@ -71,7 +66,6 @@ public void testBlinkToAppendOnlyTable() { TableTools.newTable(intCol("I", 7, 1, 2), doubleCol("D", Double.NEGATIVE_INFINITY, Math.PI, Math.E), 
instantCol("DT", dt1, dt2, dt3), col("B", true, true, false)), appendOnly); - } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestCodecColumns.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestCodecColumns.java index ad86f9b3f4d..12e776eab0a 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestCodecColumns.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestCodecColumns.java @@ -8,7 +8,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.parquet.table.BigIntegerParquetBytesCodec; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.parquet.table.ParquetTools; import io.deephaven.engine.util.TableTools; import io.deephaven.parquet.table.ParquetInstructions; @@ -16,6 +16,8 @@ import io.deephaven.util.codec.*; import junit.framework.TestCase; import org.apache.commons.lang3.mutable.MutableObject; +import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.io.File; @@ -75,20 +77,28 @@ public class TestCodecColumns { VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION, VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION_S); - private static final Table TABLE = TableTools.newTable(TABLE_DEFINITION, - TableTools.col("VWBA", new byte[] {0, 1, 2}, null, new byte[] {3, 4, 5, 6}), - TableTools.col("VWCD", null, new ArrayTuple(0, 2, 4, 6), new ArrayTuple(1, 3, 5, 7)), - TableTools.col("FWBA", new byte[] {7, 8, 9, 10, 11, 12, 13, 14, 15}, - new byte[] {16, 17, 18, 19, 20, 21, 22, 23, 24}, new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0}), - TableTools.col("VWBI", BigInteger.valueOf(91), BigInteger.valueOf(111111111111111L), null), - TableTools.col("VWBIS", BigInteger.valueOf(94), null, BigInteger.valueOf(111111111111112L))); + @Rule + public final EngineCleanup base = new EngineCleanup(); + + private Table table; + + @Before + public void setUp() { + table 
= TableTools.newTable(TABLE_DEFINITION, + TableTools.col("VWBA", new byte[] {0, 1, 2}, null, new byte[] {3, 4, 5, 6}), + TableTools.col("VWCD", null, new ArrayTuple(0, 2, 4, 6), new ArrayTuple(1, 3, 5, 7)), + TableTools.col("FWBA", new byte[] {7, 8, 9, 10, 11, 12, 13, 14, 15}, + new byte[] {16, 17, 18, 19, 20, 21, 22, 23, 24}, new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0}), + TableTools.col("VWBI", BigInteger.valueOf(91), BigInteger.valueOf(111111111111111L), null), + TableTools.col("VWBIS", BigInteger.valueOf(94), null, BigInteger.valueOf(111111111111112L))); + } @Test public void doColumnsTest() throws IOException { final File dir = Files.createTempDirectory(Paths.get(""), "CODEC_TEST").toFile(); final File dest = new File(dir, "Test.parquet"); try { - ParquetTools.writeTable(TABLE, dest, TABLE.getDefinition(), writeInstructions); + ParquetTools.writeTable(table, dest, table.getDefinition(), writeInstructions); final MutableObject instructionsOut = new MutableObject<>(); final Table result = ParquetTools.readParquetSchemaAndTable(dest, ParquetInstructions.EMPTY, instructionsOut); @@ -97,7 +107,7 @@ public void doColumnsTest() throws IOException { final ParquetInstructions readInstructions = instructionsOut.getValue(); TestCase.assertTrue( ParquetInstructions.sameColumnNamesAndCodecMappings(expectedReadInstructions, readInstructions)); - TstUtils.assertTableEquals(TABLE, result); + TstUtils.assertTableEquals(table, result); } finally { FileUtils.deleteRecursively(dir); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestConcurrentInstantiation.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestConcurrentInstantiation.java index 81ba5b0a1b0..2124fe02840 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestConcurrentInstantiation.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestConcurrentInstantiation.java @@ -20,17 +20,12 @@ import 
io.deephaven.engine.table.impl.remote.ConstructSnapshot; import io.deephaven.engine.table.impl.select.*; import io.deephaven.engine.table.impl.util.ColumnHolder; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.QueryTableTestBase; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.BooleanGenerator; -import io.deephaven.engine.testutil.GenerateTableUpdates; -import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.DoubleGenerator; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.SortedBy; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; @@ -100,7 +95,7 @@ public void testTreeTableFilter() throws ExecutionException, InterruptedExceptio () -> (QueryTable) treed.getSource().apply(new TreeTableFilter.Operator((TreeTableImpl) treed, WhereFilterFactory.getExpressions("Sentinel in 4, 6, 9, 11, 12, 13, 14, 15"))); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table rawSorted = pool.submit(callable).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); TableTools.show(rawSorted); @@ -119,13 +114,13 @@ public void testTreeTableFilter() throws ExecutionException, InterruptedExceptio final Future
future3 = pool.submit(callable); assertTableEquals(rawSorted, table2); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); final Table table3 = future3.get(TIMEOUT_LENGTH, TIMEOUT_UNIT); assertTableEquals(rawSorted, table2); assertTableEquals(table2, table3); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); TstUtils.addToTable(source, i(11), col("Sentinel", 12), @@ -137,7 +132,7 @@ public void testTreeTableFilter() throws ExecutionException, InterruptedExceptio assertTableEquals(table3, table4); source.notifyListeners(i(11), i(), i()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); assertArrayEquals( new int[] {1, 2, 3, 4, 6, 9, 10, 11, 12}, @@ -153,7 +148,7 @@ public void testFlatten() throws ExecutionException, InterruptedException, Timeo final Table tableStart = TstUtils.testRefreshingTable(i(2, 4, 6).toTracking(), col("x", 1, 2, 3), col("y", "a", "b", "c")); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table flat = pool.submit(table::flatten).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -174,7 +169,7 @@ public void testFlatten() throws ExecutionException, InterruptedException, Timeo TstUtils.assertTableEquals(prevTable(flat), tableStart); TstUtils.assertTableEquals(prevTable(flat2), tableStart); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(table, flat); TstUtils.assertTableEquals(table, flat2); @@ -192,7 +187,7 @@ public void testUpdateView() throws ExecutionException, InterruptedException, Ti final Callable
callable = () -> table.updateView("z=x*4"); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table updateView1 = pool.submit(callable).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -212,7 +207,7 @@ public void testUpdateView() throws ExecutionException, InterruptedException, Ti TstUtils.assertTableEquals(tableStart, prevTable(updateView1)); TstUtils.assertTableEquals(tableStart, prevTable(updateView2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(tableUpdate, updateView1); TstUtils.assertTableEquals(tableUpdate, updateView2); @@ -229,7 +224,7 @@ public void testView() throws ExecutionException, InterruptedException, TimeoutE final Callable
callable = () -> table.view("y", "z=x*4"); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table updateView1 = pool.submit(callable).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -249,7 +244,7 @@ public void testView() throws ExecutionException, InterruptedException, TimeoutE TstUtils.assertTableEquals(tableStart, prevTable(updateView1)); TstUtils.assertTableEquals(tableStart, prevTable(updateView2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(tableUpdate, updateView1); TstUtils.assertTableEquals(tableUpdate, updateView2); @@ -267,7 +262,7 @@ public void testDropColumns() throws ExecutionException, InterruptedException, T final Callable
callable = () -> table.dropColumns("z"); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table dropColumns1 = pool.submit(callable).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -287,7 +282,7 @@ public void testDropColumns() throws ExecutionException, InterruptedException, T TstUtils.assertTableEquals(tableStart, prevTable(dropColumns1)); TstUtils.assertTableEquals(tableStart, prevTable(dropColumns2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(tableUpdate, dropColumns1); TstUtils.assertTableEquals(tableUpdate, dropColumns2); @@ -303,7 +298,7 @@ public void testWhere() throws ExecutionException, InterruptedException, Timeout final Table tableUpdate = TstUtils.testRefreshingTable(i(2, 3, 6).toTracking(), col("x", 1, 4, 3), col("y", "a", "d", "c"), col("z", true, true, true)); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table filter1 = pool.submit(() -> table.where("z")).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -323,7 +318,7 @@ public void testWhere() throws ExecutionException, InterruptedException, Timeout TstUtils.assertTableEquals(tableStart, prevTable(filter1)); TstUtils.assertTableEquals(tableStart, prevTable(filter2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(tableUpdate, filter1); TstUtils.assertTableEquals(tableUpdate, filter2); @@ -338,7 +333,7 @@ public void testWhere2() throws ExecutionException, InterruptedException, Timeou final Table testUpdate = TstUtils.testRefreshingTable(i(3, 6).toTracking(), col("x", 4, 3), col("y", "d", "c"), col("z", true, true)); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table filter1 = pool.submit(() -> table.where("z")).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -358,7 +353,7 @@ public void testWhere2() throws ExecutionException, InterruptedException, Timeou TstUtils.assertTableEquals(tableStart, prevTable(filter1)); TstUtils.assertTableEquals(tableStart, prevTable(filter2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); showWithRowSet(table); showWithRowSet(filter1); @@ -380,10 +375,10 @@ public void testWhereDynamic() throws ExecutionException, InterruptedException, col("x", 4, 3), col("y", "d", "c"), col("z", true, true)); final QueryTable whereTable = TstUtils.testRefreshingTable(i(0).toTracking(), col("z", true)); - final DynamicWhereFilter filter = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> new DynamicWhereFilter(whereTable, true, MatchPairFactory.getExpressions("z"))); + final DynamicWhereFilter filter = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> new DynamicWhereFilter(whereTable, true, MatchPairFactory.getExpressions("z"))); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Future
future1 = dualPool.submit(() -> table.where(filter)); try { @@ -398,7 +393,7 @@ public void testWhereDynamic() throws ExecutionException, InterruptedException, assertTableEquals(tableStart, prevTable(filter2)); table.notifyListeners(i(3), i(), i(2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); final Table filter1 = future1.get(TIMEOUT_LENGTH, TIMEOUT_UNIT); TstUtils.assertTableEquals(testUpdate, filter1); @@ -413,7 +408,7 @@ public void testSort() throws ExecutionException, InterruptedException, TimeoutE final Table tableUpdate = TstUtils.testRefreshingTable(i(1, 2, 3, 4).toTracking(), col("x", 4, 3, 2, 1), col("y", "d", "c", "b", "a")); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table sort1 = pool.submit(() -> table.sortDescending("x")).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -433,7 +428,7 @@ public void testSort() throws ExecutionException, InterruptedException, TimeoutE TstUtils.assertTableEquals(tableStart, prevTable(sort1)); TstUtils.assertTableEquals(tableStart, prevTable(sort2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(tableUpdate, sort1); TstUtils.assertTableEquals(tableUpdate, sort2); @@ -452,7 +447,7 @@ public void testReverse() throws ExecutionException, InterruptedException, Timeo final Table tableUpdate3 = TstUtils.testRefreshingTable(i(1, 2, 3, 4, 5, 6).toTracking(), col("x", 6, 5, 4, 3, 2, 1), col("y", "f", "e", "d", "c", "b", "a")); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table reverse1 = pool.submit(table::reverse).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -472,13 +467,14 @@ public void testReverse() throws 
ExecutionException, InterruptedException, Timeo TstUtils.assertTableEquals(tableStart, prevTable(reverse1)); TstUtils.assertTableEquals(tableStart, prevTable(reverse2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(tableUpdate, reverse1); TstUtils.assertTableEquals(tableUpdate, reverse2); TstUtils.assertTableEquals(tableUpdate, reverse3); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(10000), col("x", 5), col("y", "e")); table.notifyListeners(i(10000), i(), i()); }); @@ -489,7 +485,7 @@ public void testReverse() throws ExecutionException, InterruptedException, Timeo assertTableEquals(tableUpdate2, reverse2); assertTableEquals(tableUpdate2, reverse3); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(10001), col("x", 6), col("y", "f")); table.notifyListeners(i(10001), i(), i()); }); @@ -506,7 +502,8 @@ public void testSortOfPartitionBy() throws ExecutionException, InterruptedExcept col("x", 1, 2, 3), col("y", "a", "a", "a")); final PartitionedTable pt = table.partitionBy("y"); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.startCycleForUnitTests(); TstUtils.addToTable(table, i(3), col("x", 4), col("y", "d")); @@ -514,8 +511,8 @@ public void testSortOfPartitionBy() throws ExecutionException, InterruptedExcept // We need to flush two notifications: one for the source table and one for the "withView" table in the // aggregation helper. 
- UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + updateGraph.flushOneNotificationForUnitTests(); + updateGraph.flushOneNotificationForUnitTests(); final Table tableA = pt.constituentFor("a"); final Table tableD = pt.constituentFor("d"); @@ -531,7 +528,7 @@ public void testSortOfPartitionBy() throws ExecutionException, InterruptedExcept TstUtils.assertTableEquals(tableD, sortD); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); } public void testChain() throws ExecutionException, InterruptedException, TimeoutException { @@ -545,7 +542,8 @@ public void testChain() throws ExecutionException, InterruptedException, Timeout final Callable
callable = () -> table.updateView("u=x*4").where("z").sortDescending("x"); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.startCycleForUnitTests(); final Table chain1 = pool.submit(callable).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -567,7 +565,7 @@ public void testChain() throws ExecutionException, InterruptedException, Timeout TstUtils.assertTableEquals(tableStart, prevTable(chain1)); TstUtils.assertTableEquals(tableStart, prevTable(chain2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); TstUtils.assertTableEquals(tableUpdate, chain1); TstUtils.assertTableEquals(tableUpdate, chain2); @@ -653,7 +651,7 @@ private void testIterative(List> transformations, int see splitCallables.add(new Pair<>(firstHalf, secondHalf)); } - final Table standard = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> { + final Table standard = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked(() -> { try { return complete.call(); } catch (Exception e) { @@ -701,7 +699,7 @@ private void testIterative(List> transformations, int see beforeStartFirstHalf.add(pool.submit(splitCallable.first).get(TIMEOUT_LENGTH, TIMEOUT_UNIT)); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); if (beforeUpdate) { // before we update the underlying data @@ -856,7 +854,8 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { dynamicTable.addUpdateListener(listener); } lastResultSize = results.size(); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .completeCycleForUnitTests(); if (beforeStartAndAfterCycle) { final List
beforeStartAndAfterCycleSplitResults = new ArrayList<>(splitCallables.size()); @@ -873,7 +872,7 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = UpdateGraphProcessor.DEFAULT.exclusiveLock() + final Table splitResult = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() .computeLocked(() -> splitCallables.get(fSplitIndex).second .apply(beforeStartFirstHalf.get(fSplitIndex))) .withAttributes(Map.of( @@ -900,7 +899,7 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = UpdateGraphProcessor.DEFAULT.exclusiveLock() + final Table splitResult = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() .computeLocked(() -> splitCallables.get(fSplitIndex).second .apply(beforeUpdateFirstHalf.get(fSplitIndex))) .withAttributes(Map.of( @@ -956,7 +955,7 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = UpdateGraphProcessor.DEFAULT.exclusiveLock() + final Table splitResult = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() .computeLocked(() -> splitCallables.get(fSplitIndex).second .apply(beforeCycleFirstHalf.get(fSplitIndex))) .withAttributes(Map.of( @@ -1004,7 +1003,7 @@ public void testSelectDistinct() throws ExecutionException, InterruptedException final Table expected2 = newTable(col("y", "a", "d", "b", "c")); final Table expected2outOfOrder = newTable(col("y", "a", "b", "c", "d")); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Callable
callable = () -> table.selectDistinct("y"); @@ -1030,7 +1029,7 @@ public void testSelectDistinct() throws ExecutionException, InterruptedException TstUtils.assertTableEquals(expected2, distinct3); TstUtils.assertTableEquals(expected2, prevTable(distinct3)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(expected2outOfOrder, distinct1); TstUtils.assertTableEquals(expected2outOfOrder, distinct2); @@ -1085,7 +1084,7 @@ public void testSelectDistinctReset() throws ExecutionException, InterruptedExce final Table slowed = table.updateView("z=barrierFunction.apply(y)"); final Table expected1 = newTable(col("z", "a", "b")); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Callable
callable = () -> slowed.selectDistinct("z"); @@ -1098,7 +1097,7 @@ public void testSelectDistinctReset() throws ExecutionException, InterruptedExce barrierFunction.sleepDuration = 0; - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); final Table distinct1 = future1.get(TIMEOUT_LENGTH, TIMEOUT_UNIT); TstUtils.assertTableEquals(expected1, distinct1); @@ -1219,12 +1218,12 @@ private void testByConcurrent(Function function, boolean hasKeys, // We only care about the silent version of this table, as it's just a vessel to tick and ensure that the // resultant table // is computed using the appropriate version. - final Table expected1 = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> function.apply(table.silent()).select()); - final Table expected2 = - UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> function.apply(table2)); + final Table expected1 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> function.apply(table.silent()).select()); + final Table expected2 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(() -> function.apply(table2)); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Future
future1 = pool.submit(callable); final Table result1; @@ -1276,8 +1275,10 @@ private void testByConcurrent(Function function, boolean hasKeys, final Future
future3 = pool.submit(callable); if (withReset) { - while (((QueryTable) slowed).getLastNotificationStep() != LogicalClock.DEFAULT.currentStep()) { - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + while (((QueryTable) slowed).getLastNotificationStep() != ExecutionContext.getContext().getUpdateGraph() + .clock().currentStep()) { + ExecutionContext.getContext().getUpdateGraph().cast() + .flushOneNotificationForUnitTests(); } } final Table result3 = future3.get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -1298,7 +1299,7 @@ private void testByConcurrent(Function function, boolean hasKeys, TableTools.show(expected2); TstUtils.assertTableEquals(expected2, result3, TableDiff.DiffItems.DoublesExact); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); if (hasKeys) { TstUtils.assertTableEquals(expected2.sort("KeyColumn"), result1.sort("KeyColumn"), @@ -1343,12 +1344,12 @@ private void testPartitionByConcurrent(boolean withReset) throws Exception { // We only care about the silent version of this table, as it's just a vessel to tick and ensure that the // resultant table // is computed using the appropriate version. 
- final Table expected1 = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.silent().partitionBy("KeyColumn").merge().select()); - final Table expected2 = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> table2.silent().partitionBy("KeyColumn").merge().select()); + final Table expected1 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> table.silent().partitionBy("KeyColumn").merge().select()); + final Table expected2 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> table2.silent().partitionBy("KeyColumn").merge().select()); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Future future1 = pool.submit(callable); final PartitionedTable result1; @@ -1393,8 +1394,10 @@ private void testPartitionByConcurrent(boolean withReset) throws Exception { final Future future3 = pool.submit(callable); if (withReset) { - while (((QueryTable) slowed).getLastNotificationStep() != LogicalClock.DEFAULT.currentStep()) { - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + while (((QueryTable) slowed).getLastNotificationStep() != ExecutionContext.getContext().getUpdateGraph() + .clock().currentStep()) { + ExecutionContext.getContext().getUpdateGraph().cast() + .flushOneNotificationForUnitTests(); } } final PartitionedTable result3 = future3.get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -1413,7 +1416,7 @@ private void testPartitionByConcurrent(boolean withReset) throws Exception { assertNull(result3c); TstUtils.assertTableEquals(expected2.where("KeyColumn = `d`"), result3d); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(expected2, result1.merge()); TstUtils.assertTableEquals(expected2, result2.merge()); @@ -1533,10 +1536,10 @@ public void 
testConstructSnapshotException() throws ExecutionException, Interrup SleepUtil.sleep(100); // add a row to the table - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); TstUtils.addToTable(table, i(10), col("y", "e")); table.notifyListeners(i(10), i(), i()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // now get the answer final String[] answer = future.get(5000, TimeUnit.MILLISECONDS); @@ -1553,7 +1556,7 @@ public void testStaticSnapshot() throws ExecutionException, InterruptedException TableTools.newTable(col("x", 1, 4, 2, 3), col("y", "a", "d", "b", "c"), col("z", true, true, false, true)); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); final Table snap1 = pool.submit(() -> table.snapshot()).get(TIMEOUT_LENGTH, TIMEOUT_UNIT); @@ -1573,7 +1576,7 @@ public void testStaticSnapshot() throws ExecutionException, InterruptedException TstUtils.assertTableEquals(tableStart, prevTable(snap1)); TstUtils.assertTableEquals(tableStart, prevTable(snap2)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TstUtils.assertTableEquals(tableStart, snap1); TstUtils.assertTableEquals(tableStart, snap2); @@ -1597,15 +1600,16 @@ public void testSnapshotLiveness() { TstUtils.assertTableEquals(snap, base); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final TableUpdate downstream = new TableUpdateImpl(i(1), i(), i(), + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + final TableUpdate downstream1 = new TableUpdateImpl(i(1), i(), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY); - TstUtils.addToTable(base, 
downstream.added(), col("x", 2)); - base.notifyListeners(downstream); + TstUtils.addToTable(base, downstream1.added(), col("x", 2)); + base.notifyListeners(downstream1); }); TstUtils.assertTableEquals(snap, prevTable(base)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final TableUpdate downstream = new TableUpdateImpl(i(1), i(), i(), RowSetShiftData.EMPTY, ModifiedColumnSet.EMPTY); TstUtils.addToTable(trigger, downstream.added()); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestDownsampledWhereFilter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestDownsampledWhereFilter.java index daf022b1715..f1337c372a5 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestDownsampledWhereFilter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestDownsampledWhereFilter.java @@ -3,16 +3,14 @@ */ package io.deephaven.engine.table.impl; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.generator.DoubleGenerator; -import io.deephaven.engine.testutil.generator.SortedInstantGenerator; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.time.DateTimeUtils; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.select.DownsampledWhereFilter; -import io.deephaven.util.SafeCloseable; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; +import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import org.junit.Test; import java.util.Random; @@ -20,17 +18,8 @@ import static io.deephaven.engine.testutil.TstUtils.*; public class TestDownsampledWhereFilter { - private SafeCloseable executionContext; - - @Before - public void setUp() throws Exception { - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @After - public void tearDown() throws 
Exception { - executionContext.close(); - } + @Rule + public final EngineCleanup framework = new EngineCleanup(); @Test public void testDownsampledWhere() { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestFormulaArrayEvaluation.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestFormulaArrayEvaluation.java index eb7d8784591..60dd4fc8fde 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestFormulaArrayEvaluation.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestFormulaArrayEvaluation.java @@ -9,6 +9,7 @@ import com.github.javaparser.ast.expr.ConditionalExpr; import com.github.javaparser.ast.expr.Expression; import io.deephaven.base.Pair; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetShiftData; @@ -20,13 +21,13 @@ import io.deephaven.engine.table.impl.sources.SingleValueColumnSource; import io.deephaven.engine.table.impl.sources.ViewColumnSource; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.TimeLiteralReplacedExpression; @@ -75,7 +76,8 @@ public void testViewIncrementalSimpleTest() { EvalNugget.from(() -> queryTable.view("newCol=Value / 2", "newCol2=newCol_[i-2] * 4")), EvalNugget.from(() -> queryTable.view("newCol=Value / 2", "newCol2=newCol_[i+2] * 4")), }; - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(10, 12, 18), intCol("Sentinel", 56, 57, 510), @@ -1350,7 +1352,9 @@ public void dh12273_simpleIncrementalRefreshingTableWhereTest() { TstUtils.validate("", en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + /* ModifiedColumnSet.ALL */ + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(10, 12, 18), intCol("Sentinel", 56, 57, 510), diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestKeyedTableListener.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestKeyedTableListener.java index db4a4a1eb54..a361a833af7 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestKeyedTableListener.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestKeyedTableListener.java @@ -3,33 +3,38 @@ */ package io.deephaven.engine.table.impl; -import io.deephaven.base.testing.BaseCachedJMockTestCase; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.tuple.ArrayTuple; import static io.deephaven.engine.util.TableTools.*; -public class TestKeyedTableListener extends BaseCachedJMockTestCase { +public class TestKeyedTableListener extends RefreshingTableTestCase { private QueryTable table; private KeyedTableListener keyedTableListener; private KeyedTableListener.KeyUpdateListener mockListener; - private final RowSet 
noAdded = RowSetFactory.empty(); - private final RowSet noRemoved = RowSetFactory.empty(); - private final RowSet noModified = RowSetFactory.empty(); + private RowSet noAdded; + private RowSet noRemoved; + private RowSet noModified; private ArrayTuple aKey; private ArrayTuple bKey; private ArrayTuple cKey; @Override - public void setUp() { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() throws Exception { + super.setUp(); + + this.noAdded = RowSetFactory.empty(); + this.noRemoved = RowSetFactory.empty(); + this.noModified = RowSetFactory.empty(); + this.mockListener = mock(KeyedTableListener.KeyUpdateListener.class); this.table = TstUtils.testRefreshingTable(TstUtils.i(0, 1, 2).toTracking(), col("Key1", "A", "B", "C"), @@ -40,13 +45,8 @@ public void setUp() { this.cKey = new ArrayTuple("C", 3); this.keyedTableListener = new KeyedTableListener(table, "Key1", "Key2"); // enable immediately - UpdateGraphProcessor.DEFAULT.sharedLock().doLocked(() -> this.keyedTableListener.addUpdateListener()); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + ExecutionContext.getContext().getUpdateGraph().sharedLock() + .doLocked(() -> this.keyedTableListener.addUpdateListener()); } public void testGetRow() { @@ -77,7 +77,8 @@ public void testNoChanges() { keyedTableListener.subscribe(bKey, mockListener); keyedTableListener.subscribe(cKey, mockListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle( () -> table.notifyListeners(noAdded.copy(), noRemoved.copy(), noModified.copy())); keyedTableListener.unsubscribe(aKey, mockListener); @@ -95,7 +96,8 @@ public void testAdd() { }); keyedTableListener.subscribe(newKey, mockListener); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newAdd = TstUtils.i(3); TstUtils.addToTable(table, newAdd, col("Key1", "D"), col("Key2", 4), col("Data", 4.0)); table.notifyListeners(newAdd, noRemoved.copy(), noModified.copy()); @@ -117,7 +119,8 @@ public void testRemoved() { }); keyedTableListener.subscribe(cKey, mockListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newRemove = TstUtils.i(2); TstUtils.removeRows(table, newRemove); table.notifyListeners(noAdded.copy(), newRemove, noModified.copy()); @@ -143,7 +146,8 @@ public void testModify() { assertEquals(3.0, vals[2]); keyedTableListener.subscribe(cKey, mockListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newModified = TstUtils.i(2); TstUtils.addToTable(table, newModified, col("Key1", "C"), col("Key2", 3), col("Data", 6.0)); @@ -172,7 +176,9 @@ public void testModifyChangedKey() { keyedTableListener.subscribe(cKey, mockListener); keyedTableListener.subscribe(newKey, mockListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Add to table on an existing row key is a modify + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newModified = TstUtils.i(2); // Add to table on an existing row key is a modify TstUtils.addToTable(table, newModified, col("Key1", "C"), col("Key2", 4), @@ -213,7 +219,9 @@ public void testModifyKeyMoved() { keyedTableListener.subscribe(cKey, 
mockListener); keyedTableListener.subscribe(newKey, mockListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Add to table on an existing row key is a modify + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newModified = TstUtils.i(1, 2); // Add to table on an existing row key is a modify TstUtils.addToTable(table, newModified, col("Key1", "C", "D"), col("Key2", 3, 4), @@ -254,7 +262,8 @@ public void testModifySwap() { keyedTableListener.subscribe(bKey, mockListener); keyedTableListener.subscribe(cKey, mockListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newModified = TstUtils.i(1, 2); TstUtils.addToTable(table, newModified, col("Key1", "C", "B"), col("Key2", 3, 2), col("Data", 3.0, 2.0)); @@ -294,7 +303,8 @@ public void testAddRemoveModify() { keyedTableListener.subscribe(cKey, mockListener); keyedTableListener.subscribe(newKey, mockListener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newRemoved = TstUtils.i(2); TstUtils.removeRows(table, newRemoved); @@ -346,14 +356,15 @@ public void testRemoveAdd() { keyedTableListener.subscribe(newKey, mockListener); // Two cycles -- first remove - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newRemoved = TstUtils.i(2); TstUtils.removeRows(table, newRemoved); table.notifyListeners(noAdded.copy(), newRemoved, noModified.copy()); }); // Now add - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet newAdded = TstUtils.i(2); TstUtils.addToTable(table, newAdded, col("Key1", "D"), col("Key2", 4), col("Data", 4.0)); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestListenerFailure.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestListenerFailure.java index a80e274e4f6..35299cfbe69 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestListenerFailure.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestListenerFailure.java @@ -3,10 +3,11 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.select.FormulaEvaluationException; import io.deephaven.engine.rowset.RowSet; @@ -21,12 +22,13 @@ public class TestListenerFailure extends RefreshingTableTestCase { public void testListenerFailure() { final QueryTable source = TstUtils.testRefreshingTable(col("Str", "A", "B")); - final Table updated = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> source.update("UC=Str.toUpperCase()")); + final Table updated = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> source.update("UC=Str.toUpperCase()")); TableTools.showWithRowSet(updated); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(source, i(2, 3), col("Str", "C", "D")); source.notifyListeners(i(2, 3), i(), 
i()); }); @@ -34,7 +36,7 @@ public void testListenerFailure() { assertFalse(updated.isFailed()); allowingError(() -> { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(source, i(4, 5), col("Str", "E", null)); source.notifyListeners(i(4, 5), i(), i()); }); @@ -84,7 +86,8 @@ public void testMemoCheck() { TableTools.showWithRowSet(filtered); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(source, i(2, 3), col("Str", "C", "D")); source.notifyListeners(i(2, 3), i(), i()); }); @@ -95,7 +98,7 @@ public void testMemoCheck() { assertSame(filtered, filteredAgain); allowingError(() -> { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(source, i(4, 5), col("Str", "E", null)); source.notifyListeners(i(4, 5), i(), i()); }); @@ -105,7 +108,7 @@ public void testMemoCheck() { assertTrue(filtered.isFailed()); assertTrue(filteredAgain.isFailed()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(source, i(5)); source.notifyListeners(i(), i(5), i()); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMapCodecColumns.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMapCodecColumns.java index 3ea088eabf6..ea27ae6c40d 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMapCodecColumns.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMapCodecColumns.java @@ -9,11 +9,14 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import 
io.deephaven.parquet.table.ParquetTools; import io.deephaven.engine.util.TableTools; import io.deephaven.parquet.table.ParquetInstructions; import io.deephaven.util.codec.*; import junit.framework.TestCase; +import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.io.File; @@ -65,40 +68,49 @@ public class TestMapCodecColumns { INT_MAP_COLUMN_DEFINITION, LONG_MAP_COLUMN_DEFINITION); - @SuppressWarnings("unchecked") - private static final Table TABLE = TableTools.newTable(TABLE_DEFINITION, - TableTools.col("StrStrMap", CollectionUtil.mapFromArray(String.class, String.class, "AK", "AV", "BK", "BV"), - null, Collections.singletonMap("Key", "Value")), - TableTools.col("StrBoolMap", - CollectionUtil.mapFromArray(String.class, Boolean.class, "True", true, "False", false, "Null", - null), - null, Collections.singletonMap("Truthiness", true)), - TableTools.col("StrDoubleMap", - CollectionUtil.mapFromArray(String.class, Double.class, "One", 1.0, "Two", 2.0, "Null", null), null, - Collections.singletonMap("Pi", Math.PI)), - TableTools.col("StrFloatMap", - CollectionUtil.mapFromArray(String.class, Float.class, "Ten", 10.0f, "Twenty", 20.0f, "Null", null), - null, Collections.singletonMap("e", (float) Math.E)), - TableTools.col("StrIntMap", - CollectionUtil.mapFromArray(String.class, Integer.class, "Million", 1_000_000, "Billion", - 1_000_000_000, "Null", null), - null, Collections.singletonMap("Negative", -1)), - TableTools - .col("StrLongMap", - CollectionUtil.mapFromArray(String.class, Long.class, "Trillion", 1_000_000_000_000L, - "Billion", 1_000_000_000L, "Null", null), - null, Collections.singletonMap("Negative", -1L))); + @Rule + public final EngineCleanup base = new EngineCleanup(); + + private Table table; + + @Before + public void setUp() { + table = TableTools.newTable(TABLE_DEFINITION, + TableTools.col("StrStrMap", + CollectionUtil.mapFromArray(String.class, String.class, "AK", "AV", "BK", "BV"), + null, Collections.singletonMap("Key", 
"Value")), + TableTools.col("StrBoolMap", + CollectionUtil.mapFromArray(String.class, Boolean.class, "True", true, "False", false, "Null", + null), + null, Collections.singletonMap("Truthiness", true)), + TableTools.col("StrDoubleMap", + CollectionUtil.mapFromArray(String.class, Double.class, "One", 1.0, "Two", 2.0, "Null", null), + null, + Collections.singletonMap("Pi", Math.PI)), + TableTools.col("StrFloatMap", + CollectionUtil.mapFromArray(String.class, Float.class, "Ten", 10.0f, "Twenty", 20.0f, "Null", + null), + null, Collections.singletonMap("e", (float) Math.E)), + TableTools.col("StrIntMap", + CollectionUtil.mapFromArray(String.class, Integer.class, "Million", 1_000_000, "Billion", + 1_000_000_000, "Null", null), + null, Collections.singletonMap("Negative", -1)), + TableTools.col("StrLongMap", + CollectionUtil.mapFromArray(String.class, Long.class, "Trillion", 1_000_000_000_000L, + "Billion", 1_000_000_000L, "Null", null), + null, Collections.singletonMap("Negative", -1L))); + } @Test public void doColumnsTest() throws IOException { final File dir = Files.createTempDirectory(Paths.get(""), "CODEC_TEST").toFile(); final File dest = new File(dir, "Table.parquet"); try { - ParquetTools.writeTable(TABLE, dest, TABLE.getDefinition(), writeInstructions); + ParquetTools.writeTable(table, dest, table.getDefinition(), writeInstructions); final Table result = ParquetTools.readTable(dest); TableTools.show(result); TestCase.assertEquals(TABLE_DEFINITION, result.getDefinition()); - TstUtils.assertTableEquals(TABLE, result); + TstUtils.assertTableEquals(table, result); } finally { FileUtils.deleteRecursively(dir); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMoveColumns.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMoveColumns.java index ef4c7c38289..e3f7109dad4 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMoveColumns.java +++ 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestMoveColumns.java @@ -3,35 +3,24 @@ */ package io.deephaven.engine.table.impl; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; -import io.deephaven.util.SafeCloseable; -import junit.framework.TestCase; import java.util.stream.Collectors; -public class TestMoveColumns extends TestCase { +public class TestMoveColumns extends RefreshingTableTestCase { private Table table; private int numCols; - private SafeCloseable executionContext; - @Override - protected void setUp() throws Exception { + public void setUp() throws Exception { super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); table = TableTools.emptyTable(1).update("a=1", "b=2", "c=3", "d=4", "e=5"); numCols = table.numColumns(); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } - public void testMoveColumns() { // Basic moving Table temp = table.moveColumns(0, "a"); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionAwareSourceTable.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionAwareSourceTable.java index e0d1c12f615..d162aca1717 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionAwareSourceTable.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionAwareSourceTable.java @@ -6,15 +6,16 @@ import io.deephaven.base.Pair; import io.deephaven.base.verify.Assert; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.*; import 
io.deephaven.engine.table.impl.perf.PerformanceEntry; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TestErrorNotification; import io.deephaven.engine.testutil.TestNotification; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.locations.*; import io.deephaven.engine.table.impl.locations.impl.SimpleTableLocationKey; import io.deephaven.engine.table.impl.locations.impl.TableLocationSubscriptionBuffer; @@ -150,7 +151,7 @@ public void setUp() throws Exception { expectedRowSet = RowSetFactory.empty(); SUT = new PartitionAwareSourceTable(TABLE_DEFINITION, "", componentFactory, locationProvider, - UpdateGraphProcessor.DEFAULT); + ExecutionContext.getContext().getUpdateGraph()); assertIsSatisfied(); } @@ -267,7 +268,7 @@ public Object invoke(Invocation invocation) { if (coalesceAndListen) { if (ciType == ConcurrentInstantiationType.UpdatingClosed || ciType == ConcurrentInstantiationType.UpdatingOpen) { - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); } try { coalesced = SUT.coalesce(); @@ -286,7 +287,8 @@ public Object invoke(Invocation invocation) { assertIsSatisfied(); assertRowSetEquals(expectedRowSet, SUT.getRowSet()); if (ciType == ConcurrentInstantiationType.UpdatingClosed) { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .completeCycleForUnitTests(); } } } @@ -334,13 +336,13 @@ public Object invoke(Invocation invocation) { }); notification.reset(); - if (LogicalClock.DEFAULT.currentState() == LogicalClock.State.Idle) { - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + if (ExecutionContext.getContext().getUpdateGraph().clock().currentState() == LogicalClock.State.Idle) { + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); } try { SUT.refresh(); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } assertIsSatisfied(); notification.assertInvoked(); @@ -357,7 +359,8 @@ private void doRefreshUnchangedCheck() { }); notification.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(SUT::refresh); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(SUT::refresh); assertIsSatisfied(); notification.assertNotInvoked(); @@ -383,7 +386,8 @@ public Object invoke(Invocation invocation) { }); errorNotification.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(SUT::refresh); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(SUT::refresh); assertIsSatisfied(); errorNotification.assertInvoked(); @@ -405,7 +409,7 @@ private void doAddLocationsRefreshCheck(final ImmutableTableLocationKey[] tableL @Test public void testRedefinition() { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(this::doTestRedefinition); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(this::doTestRedefinition); } private void doTestRedefinition() { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionBy.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionBy.java index 136cc1c80e3..b3669ff09f5 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionBy.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestPartitionBy.java @@ -4,6 +4,7 @@ package io.deephaven.engine.table.impl; import io.deephaven.api.filter.Filter; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import 
io.deephaven.engine.liveness.SingletonLivenessManager; import io.deephaven.engine.table.PartitionedTable; @@ -12,7 +13,6 @@ import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.liveness.LivenessScope; @@ -143,7 +143,8 @@ public void testErrorPropagation() { assertTableEquals(tableA, table.where("Key=`A`")); assertTableEquals(tableB, table.where("Key=`B`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 8)); table.notifyListeners(i(8), i(), i()); }); @@ -151,7 +152,7 @@ public void testErrorPropagation() { assertTableEquals(tableA, table.where("Key=`A`")); assertTableEquals(tableB, table.where("Key=`B`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 10)); }); @@ -163,7 +164,7 @@ public void testErrorPropagation() { assertNull(listenerA.originalException()); assertNull(listenerB.originalException()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(table, i(8)); table.notifyListeners(i(), i(8), i()); }); @@ -199,7 +200,8 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableA, table.where("Key=`A`")); assertTableEquals(tableB, table.where("Key=`B`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> 
{ TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 8)); table.notifyListeners(i(8), i(), i()); }); @@ -207,7 +209,9 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableA, table.where("Key=`A`")); assertTableEquals(tableB, table.where("Key=`B`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Added row, wants to make new + // state + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(9), col("Key", "C"), intCol("Int", 10)); // Added row, wants to make new // state table.notifyListeners(i(9), i(), i()); @@ -217,7 +221,8 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); expectLivenessException(() -> byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Modified row, wants to move from existent state to nonexistent state + updateGraph.runWithinUnitTestCycle(() -> { // Modified row, wants to move from existent state to nonexistent state TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); table.notifyListeners(i(), i(), i(8)); @@ -227,7 +232,8 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); expectLivenessException(() -> byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Modified row, staying in nonexistent state + updateGraph.runWithinUnitTestCycle(() -> { // Modified row, staying in nonexistent state TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 12)); table.notifyListeners(i(), i(), i(8)); @@ -237,7 +243,8 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); expectLivenessException(() -> byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Modified row, wants to move from nonexistent state to existent state + updateGraph.runWithinUnitTestCycle(() -> { // Modified row, wants to move 
from nonexistent state to existent state TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 13)); table.notifyListeners(i(), i(), i(8)); @@ -247,7 +254,8 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); expectLivenessException(() -> byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Modified row, staying in existent state + updateGraph.runWithinUnitTestCycle(() -> { // Modified row, staying in existent state TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 14)); table.notifyListeners(i(), i(), i(8)); @@ -257,7 +265,8 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); expectLivenessException(() -> byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Removed row from a nonexistent state + updateGraph.runWithinUnitTestCycle(() -> { // Removed row from a nonexistent state TstUtils.removeRows(table, i(9)); table.notifyListeners(i(), i(9), i()); @@ -267,7 +276,8 @@ public void testNewKeysAfterResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); expectLivenessException(() -> byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Removed row from an existent state + updateGraph.runWithinUnitTestCycle(() -> { // Removed row from an existent state TstUtils.removeRows(table, i(8)); table.notifyListeners(i(), i(8), i()); @@ -294,7 +304,8 @@ public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertNull(byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 8)); table.notifyListeners(i(8), i(), i()); }); @@ -303,7 +314,8 @@ 
public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertNull(byKey.constituentFor("C")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Added row, makes new state + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(9), col("Key", "C"), intCol("Int", 10)); // Added row, makes new state table.notifyListeners(i(9), i(), i()); }); @@ -313,10 +325,9 @@ public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertTableEquals(tableC, table.where("Key=`C`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); // Modified row, wants to move - // from original state to new - // state + updateGraph.runWithinUnitTestCycle(() -> { + // Modified row, wants to move from original state to new state + TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); table.notifyListeners(i(), i(), i(8)); }); @@ -324,9 +335,9 @@ public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertTableEquals(tableC, table.where("Key=`C`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 12)); // Modified row, staying in new - // state + updateGraph.runWithinUnitTestCycle(() -> { + // Modified row, staying in new state + TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 12)); table.notifyListeners(i(), i(), i(8)); }); @@ -334,10 +345,9 @@ public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertTableEquals(tableC, table.where("Key=`C`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 13)); // Modified row, wants to move - // from new state to original - // state + updateGraph.runWithinUnitTestCycle(() -> { + 
// Modified row, wants to move from new state to original state + TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 13)); table.notifyListeners(i(), i(), i(8)); }); @@ -345,9 +355,9 @@ public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertTableEquals(tableC, table.where("Key=`C`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 14)); // Modified row, staying in - // original state + updateGraph.runWithinUnitTestCycle(() -> { + // Modified row, staying in original state + TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 14)); table.notifyListeners(i(), i(), i(8)); }); @@ -355,8 +365,9 @@ public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertTableEquals(tableC, table.where("Key=`C`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.removeRows(table, i(9)); // Removed row from a new state + updateGraph.runWithinUnitTestCycle(() -> { + // Removed row from a new state + TstUtils.removeRows(table, i(9)); table.notifyListeners(i(), i(9), i()); }); @@ -364,8 +375,9 @@ public void testNewKeysBeforeResultReleased() { assertTableEquals(tableB, table.where("Key=`B`")); assertTableEquals(tableC, table.where("Key=`C`")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.removeRows(table, i(8)); // Removed row from an original state + updateGraph.runWithinUnitTestCycle(() -> { + // Removed row from an original state + TstUtils.removeRows(table, i(8)); table.notifyListeners(i(), i(8), i()); }); @@ -417,7 +429,8 @@ public void testReleaseRaceRollup() { } final MutableLong start = new MutableLong(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { 
TstUtils.addToTable(rawTable, i(8), col("Key", "C"), @@ -430,7 +443,7 @@ public void testReleaseRaceRollup() { final MutableObject> mutableFuture = new MutableObject<>(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(rawTable, i(10, 11, 12), col("Key", "C", "D", "E"), @@ -505,10 +518,10 @@ private void testPartitionByWithShifts(int seed) { final Table source = TableTools.merge(simpleTable, queryTable.updateView("K=k")).flatten(); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - EvalNugget.Sorted.from(() -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> source.partitionBy("Sym").merge()), "Sym"), - EvalNugget.Sorted.from(() -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> source.where("Sym=`a`").partitionBy("Sym").merge()), "Sym"), + EvalNugget.Sorted.from(() -> ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> source.partitionBy("Sym").merge()), "Sym"), + EvalNugget.Sorted.from(() -> ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> source.where("Sym=`a`").partitionBy("Sym").merge()), "Sym"), }; final int steps = 50; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSelectOverheadLimiter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSelectOverheadLimiter.java index 1f92827a160..b3a2588a71c 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSelectOverheadLimiter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSelectOverheadLimiter.java @@ -3,16 +3,14 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.*; 
import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.liveness.SingletonLivenessManager; @@ -23,6 +21,7 @@ import io.deephaven.util.SafeCloseable; import java.util.Random; + import org.junit.experimental.categories.Category; import static io.deephaven.engine.testutil.TstUtils.*; @@ -33,39 +32,41 @@ public void testSelectOverheadLimiter() { final QueryTable queryTable = TstUtils.testRefreshingTable( RowSetFactory.fromRange(0, 100).toTracking()); final Table sentinelTable = queryTable.updateView("Sentinel=k"); - final Table densified = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); + + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Table densified = updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); assertEquals(densified.getRowSet(), sentinelTable.getRowSet()); assertTableEquals(sentinelTable, densified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet added = RowSetFactory.fromRange(10000, 11000); - queryTable.getRowSet().writableCast().insert(added); - queryTable.notifyListeners(added, i(), i()); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet added3 = RowSetFactory.fromRange(10000, 11000); + queryTable.getRowSet().writableCast().insert(added3); + queryTable.notifyListeners(added3, i(), i()); }); assertEquals(sentinelTable.getRowSet(), 
densified.getRowSet()); assertTableEquals(sentinelTable, densified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet added = RowSetFactory.fromRange(11001, 11100); - queryTable.getRowSet().writableCast().insert(added); - queryTable.notifyListeners(added, i(), i()); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet added2 = RowSetFactory.fromRange(11001, 11100); + queryTable.getRowSet().writableCast().insert(added2); + queryTable.notifyListeners(added2, i(), i()); }); assertEquals(sentinelTable.getRowSet(), densified.getRowSet()); assertTableEquals(sentinelTable, densified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet added = RowSetFactory.fromRange(20000, 20100); - queryTable.getRowSet().writableCast().insert(added); - queryTable.notifyListeners(added, i(), i()); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet added1 = RowSetFactory.fromRange(20000, 20100); + queryTable.getRowSet().writableCast().insert(added1); + queryTable.notifyListeners(added1, i(), i()); }); assertEquals(sentinelTable.getRowSet(), densified.getRowSet()); assertTableEquals(sentinelTable, densified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet added = RowSetFactory.fromRange(30000, 30100); queryTable.getRowSet().writableCast().insert(added); queryTable.notifyListeners(added, i(), i()); @@ -79,15 +80,16 @@ public void testShift() { final QueryTable queryTable = TstUtils.testRefreshingTable( RowSetFactory.fromRange(0, 100).toTracking()); final Table sentinelTable = queryTable.updateView("Sentinel=ii"); - final Table densified = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Table densified = updateGraph.sharedLock().computeLocked( + () -> 
SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); assertEquals(densified.getRowSet(), sentinelTable.getRowSet()); assertTableEquals(sentinelTable, densified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet removed = RowSetFactory.fromRange(0, 100); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet removed1 = RowSetFactory.fromRange(0, 100); final RowSet added = RowSetFactory.fromRange(10000, 10100); - queryTable.getRowSet().writableCast().update(added, removed); + queryTable.getRowSet().writableCast().update(added, removed1); final TableUpdateImpl update = new TableUpdateImpl(); final RowSetShiftData.Builder builder = new RowSetShiftData.Builder(); builder.shiftRange(0, 1000, 10000); @@ -102,7 +104,7 @@ public void testShift() { assertEquals(sentinelTable.getRowSet(), densified.getRowSet()); assertTableEquals(sentinelTable, densified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet removed = RowSetFactory.fromRange(10000, 10100); queryTable.getRowSet().writableCast().remove(removed); queryTable.notifyListeners(i(), removed, i()); @@ -138,45 +140,39 @@ private void testPartitionBy(int seed) { final QueryTable queryTable = getTable(size, random, columnInfo); final Table simpleTable = TableTools.newTable(TableTools.col("Sym", "a"), TableTools.intCol("intCol", 30), TableTools.doubleCol("doubleCol", 40.1)).updateView("K=-2L"); - final Table source = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.merge(simpleTable, queryTable.updateView("K=k")).flatten()); + final UpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph(); + final Table source = updateGraph.sharedLock().computeLocked( + () -> TableTools.merge(simpleTable, queryTable.updateView("K=k")).flatten()); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - EvalNugget.Sorted.from( - () -> 
UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( - () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), - 2.0)), + EvalNugget.Sorted.from(() -> updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), 2.0)), "Sym"), - EvalNugget.Sorted.from(() -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.partitionBy("Sym").merge(), 2.0).select()), + EvalNugget.Sorted.from(() -> updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), 2.0) + .select()), "Sym"), - EvalNugget.Sorted.from( - () -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( - () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), - 4.0)), + EvalNugget.Sorted.from(() -> updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), 4.0)), "Sym"), - EvalNugget.Sorted.from( - () -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( - () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), - 4.5)), + EvalNugget.Sorted.from(() -> updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), 4.5)), "Sym"), - EvalNugget.Sorted.from(() -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.partitionBy("Sym").merge(), 4.5).select()), + EvalNugget.Sorted.from(() -> updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), 4.5) + .select()), "Sym"), - EvalNugget.Sorted.from( - () -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( - () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), - 5.0)), + EvalNugget.Sorted.from(() -> 
updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), 5.0)), "Sym"), - EvalNugget.Sorted.from(() -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.partitionBy("Sym").merge(), 10.0).select()), + EvalNugget.Sorted.from( + () -> updateGraph.sharedLock() + .computeLocked(() -> SelectOverheadLimiter + .clampSelectOverhead(source.partitionBy("Sym").merge(), 10.0).select()), "Sym"), - EvalNugget.Sorted.from(() -> UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.partitionBy("Sym").merge(), 10.0).select()), + EvalNugget.Sorted.from(() -> updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.partitionBy("Sym").merge(), 10.0) + .select()), "Sym"), }; @@ -194,27 +190,28 @@ public void testScope() { RowSetFactory.fromRange(0, 100).toTracking()); final SafeCloseable scopeCloseable = LivenessScopeStack.open(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); final Table sentinelTable = queryTable.updateView("Sentinel=k"); - final Table densified = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); + final Table densified = updateGraph.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); assertEquals(densified.getRowSet(), sentinelTable.getRowSet()); assertTableEquals(sentinelTable, densified); final SingletonLivenessManager densifiedManager = new SingletonLivenessManager(densified); - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(scopeCloseable::close); + updateGraph.exclusiveLock().doLocked(scopeCloseable::close); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final RowSet added = RowSetFactory.fromRange(10000, 11000); - 
queryTable.getRowSet().writableCast().insert(added); - queryTable.notifyListeners(added, i(), i()); + updateGraph.runWithinUnitTestCycle(() -> { + final RowSet added1 = RowSetFactory.fromRange(10000, 11000); + queryTable.getRowSet().writableCast().insert(added1); + queryTable.notifyListeners(added1, i(), i()); }); assertEquals(sentinelTable.getRowSet(), densified.getRowSet()); assertTableEquals(sentinelTable, densified); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet added = RowSetFactory.fromRange(11001, 11100); queryTable.getRowSet().writableCast().insert(added); queryTable.notifyListeners(added, i(), i()); @@ -229,7 +226,7 @@ public void testScope() { densified.dropReference(); sentinelTable.dropReference(); - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(densifiedManager::release); + updateGraph.exclusiveLock().doLocked(densifiedManager::release); org.junit.Assert.assertFalse(densified.tryRetainReference()); org.junit.Assert.assertFalse(sentinelTable.tryRetainReference()); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSharedContext.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSharedContext.java index edb29d8c46c..799d30057dd 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSharedContext.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSharedContext.java @@ -5,12 +5,12 @@ import static io.deephaven.engine.testutil.TstUtils.getTable; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ResettableContext; import io.deephaven.engine.table.SharedContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.generator.TestDataGenerator; import io.deephaven.engine.testutil.generator.IntGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.testutil.junit4.EngineCleanup; import 
org.junit.Rule; import org.junit.Test; @@ -99,7 +99,7 @@ public void testConditionFilterWithSimpleRedirections() { final String condition = String.join(" && ", conditions); final QueryTable t0 = getTable(size, random, initColumnInfos(cols, gs)); final String sortCol = "TS"; - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(() -> { + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(() -> { final Table t1 = t0.update(sortCol + "=i").reverse(); final Table t1Filtered = t1.where(condition); final Table t2 = t1.sort(sortCol); @@ -145,7 +145,7 @@ public void testConditionFilterWithMoreComplexRedirections() { final QueryTable t0 = getTable(size, random, initColumnInfos(cols, gs)); final String sortCol = "TS"; final String formulaCol = "F"; - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(() -> { + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(() -> { final Table t1 = t0.update(sortCol + "=i", formulaCol + "=" + cols[0] + "+" + cols[1]).reverse(); final Table t1Filtered = t1.where(condition); final Table t2 = t1.sort(sortCol).naturalJoin(t1, sortCol, String.join(",", joinColumnsToAdd)); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSimpleSourceTable.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSimpleSourceTable.java index 05645160ff5..c4559d5e85b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSimpleSourceTable.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSimpleSourceTable.java @@ -5,11 +5,11 @@ import io.deephaven.base.Pair; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import 
io.deephaven.engine.table.impl.locations.TableDataException; import io.deephaven.engine.table.impl.locations.TableLocation; import io.deephaven.engine.table.impl.locations.TableLocationProvider; @@ -199,7 +199,7 @@ private void doSingleLocationInitializeCheck(final boolean throwException, @Test public void testRedefinition() { - UpdateGraphProcessor.DEFAULT.exclusiveLock().doLocked(this::doTestRedefinition); + ExecutionContext.getContext().getUpdateGraph().exclusiveLock().doLocked(this::doTestRedefinition); } private void doTestRedefinition() { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSort.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSort.java index fdda3105f7f..46242faeadc 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSort.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSort.java @@ -3,14 +3,10 @@ */ package io.deephaven.engine.table.impl; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.context.QueryCompiler; -import io.deephaven.configuration.Configuration; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.exceptions.NotSortableException; import io.deephaven.engine.table.DataColumn; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -26,38 +22,11 @@ import java.util.Map; import java.util.function.BiFunction; -import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; import org.junit.experimental.categories.Category; @Category(OutOfBandTest.class) -public class TestSort extends BaseArrayTestCase { - - private static final boolean ENABLE_QUERY_COMPILER_LOGGING = Configuration.getInstance() - 
.getBooleanForClassWithDefault(TestSort.class, "QueryCompiler.logEnabled", false); - - private boolean lastMemoize = false; - private boolean oldQueryCompilerLogEnabled; - private SafeCloseable executionContext; - - @Override - protected void setUp() throws Exception { - super.setUp(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - lastMemoize = QueryTable.setMemoizeResults(false); - oldQueryCompilerLogEnabled = QueryCompiler.setLogEnabled(ENABLE_QUERY_COMPILER_LOGGING); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - QueryCompiler.setLogEnabled(oldQueryCompilerLogEnabled); - QueryTable.setMemoizeResults(lastMemoize); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - executionContext.close(); - } +public class TestSort extends RefreshingTableTestCase { @FunctionalInterface interface ThrowingConsumer { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSortIncrementalPerformance.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSortIncrementalPerformance.java index 5a86ed68000..14cf95ff280 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSortIncrementalPerformance.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSortIncrementalPerformance.java @@ -4,9 +4,10 @@ package io.deephaven.engine.table.impl; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.context.QueryScope; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; import io.deephaven.engine.testutil.junit4.EngineCleanup; @@ -75,8 +76,9 @@ private R 
incrementalBenchmark(Table inputTable, Function function final R result = function.apply(filtered); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (filtered.size() < inputTable.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); } return result; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSymbolTableCombiner.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSymbolTableCombiner.java index 772ad21f823..2d20f886ac6 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSymbolTableCombiner.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestSymbolTableCombiner.java @@ -3,14 +3,15 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.TableUpdate; import io.deephaven.engine.table.TableUpdateListener; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.generator.StringGenerator; import io.deephaven.engine.testutil.generator.UniqueLongGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.sources.IntegerSparseArraySource; import io.deephaven.engine.table.impl.sources.regioned.SymbolTableSource; @@ -87,11 +88,12 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { }; symbolTable.addUpdateListener(symbolTableListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < 750; step++) { if 
(RefreshingTableTestCase.printTableUpdates) { System.out.println("Step = " + step + ", size=" + symbolTable.size()); } - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] updates = GenerateTableUpdates.computeTableUpdates(size / 10, random, symbolTable, columnInfo, true, false, false); symbolTable.notifyListeners(updates[0], updates[1], updates[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestTotalsTable.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestTotalsTable.java index 25307cb6c2d..eb53b5674a2 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestTotalsTable.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestTotalsTable.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; import io.deephaven.engine.testutil.generator.*; @@ -12,7 +13,6 @@ import io.deephaven.engine.util.TotalsTableBuilder; import io.deephaven.function.Numeric; import io.deephaven.vector.DoubleVectorDirect; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.ColumnSource; import io.deephaven.util.QueryConstants; @@ -59,8 +59,8 @@ public void testTotalsTable() { new ShortGenerator())); final TotalsTableBuilder builder = new TotalsTableBuilder(); - final Table totals = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(builder.applyToTable(queryTable))); + final Table totals = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> TotalsTableBuilder.makeTotalsTable(builder.applyToTable(queryTable))); final Map> resultColumns = totals.getColumnSourceMap(); assertEquals(1, totals.size()); assertEquals(new LinkedHashSet<>(Arrays.asList("intCol", "intCol2", 
"doubleCol", "doubleNullCol", "doubleCol2", @@ -82,8 +82,8 @@ public void testTotalsTable() { builder.setOperation("Sym", "first"); builder.setOperation("intCol2", "last"); - final Table totals2 = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); + final Table totals2 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); assertEquals(new LinkedHashSet<>(Arrays.asList("Sym", "intCol2", "byteCol")), totals2.getColumnSourceMap().keySet()); assertEquals(Numeric.min((byte[]) DataAccessHelpers.getColumn(queryTable, "byteCol").getDirect()), @@ -102,8 +102,8 @@ public void testTotalsTable() { final boolean old = QueryTable.setMemoizeResults(true); try { - final Table totals3 = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); + final Table totals3 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); assertEquals( new LinkedHashSet<>(Arrays.asList("Sym", "intCol2", "doubleCol", "doubleNullCol__Std", "doubleNullCol__Count", "doubleCol2", "byteCol", "shortCol")), @@ -129,8 +129,8 @@ public void testTotalsTable() { EPSILON); assertEquals(queryTable.size(), (long) DataAccessHelpers.getColumn(totals3, "shortCol").get(0)); - final Table totals4 = UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); + final Table totals4 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); assertSame(totals3, totals4); } finally { QueryTable.setMemoizeResults(old); @@ -156,22 +156,22 @@ public void testTotalsTableIncremental() { new EvalNugget() { public Table e() { final TotalsTableBuilder 
totalsTableBuilder = new TotalsTableBuilder(); - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> totalsTableBuilder.applyToTable(queryTable)); + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> totalsTableBuilder.applyToTable(queryTable)); } }, new EvalNugget() { public Table e() { final TotalsTableBuilder totalsTableBuilder = new TotalsTableBuilder(); - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> TotalsTableBuilder - .makeTotalsTable(totalsTableBuilder.applyToTable(queryTable))); + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> TotalsTableBuilder.makeTotalsTable(totalsTableBuilder.applyToTable(queryTable))); } }, new EvalNugget() { public Table e() { final TotalsTableBuilder totalsTableBuilder = new TotalsTableBuilder(); - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> TotalsTableBuilder - .makeTotalsTable(totalsTableBuilder.applyToTable(queryTable))); + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> TotalsTableBuilder.makeTotalsTable(totalsTableBuilder.applyToTable(queryTable))); } }, }; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestUngroupRebase.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestUngroupRebase.java index 0b455f09886..f592d92eea0 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TestUngroupRebase.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TestUngroupRebase.java @@ -3,11 +3,11 @@ */ package io.deephaven.engine.table.impl; -import io.deephaven.engine.table.Table; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.testutil.EvalNugget; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.util.ColumnHolder; import io.deephaven.engine.rowset.RowSet; @@ -24,25 +24,27 @@ public void testUngroupAgnosticRebase() { final QueryTable table = TstUtils.testRefreshingTable(col("X", 1, 3), arrayColumnHolder); EvalNugget[] en = new EvalNugget[] { - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(table::ungroup); - } - }, + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(table::ungroup); + }) }; // don't remove or add anything, let's just do one step - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .startCycleForUnitTests(); RowSet keysToAdd = RowSetFactory.empty(); RowSet keysToRemove = RowSetFactory.empty(); RowSet keysToModify = RowSetFactory.empty(); table.notifyListeners(keysToAdd, keysToRemove, keysToModify); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .completeCycleForUnitTests(); TableTools.show(table); TstUtils.validate("ungroupRebase base", en); // Now let's modify the first row, but not cause a rebase - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .startCycleForUnitTests(); keysToAdd = RowSetFactory.empty(); keysToRemove = RowSetFactory.empty(); keysToModify = RowSetFactory.fromKeys(0); @@ -50,20 +52,23 @@ public Table e() { ColumnHolder valueModifications = col("Y", new int[] {10, 20, 30}); TstUtils.addToTable(table, keysToModify, keyModifications, valueModifications); table.notifyListeners(keysToAdd, keysToRemove, keysToModify); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .completeCycleForUnitTests(); 
TableTools.show(table); TstUtils.validate("ungroupRebase add no rebase", en); // Now let's modify the first row such that we will cause a rebasing operation - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .startCycleForUnitTests(); keysToAdd = RowSetFactory.empty(); keysToRemove = RowSetFactory.empty(); keysToModify = RowSetFactory.fromKeys(0); valueModifications = col("Y", new int[] {10, 20, 30, 40, 50, 60}); TstUtils.addToTable(table, keysToModify, keyModifications, valueModifications); table.notifyListeners(keysToAdd, keysToRemove, keysToModify); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .completeCycleForUnitTests(); TableTools.show(table); TstUtils.validate("ungroupRebase rebase", en); @@ -74,26 +79,26 @@ public Table e() { final QueryTable table2 = TstUtils.testRefreshingTable(col("X", 1, 2, 3, 4), arrayColumnHolder); en = new EvalNugget[] { - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(table2::ungroup); - } - }, + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock() + .computeLocked(table2::ungroup); + }) }; // let's remove the second row, so that we can add something to it on the next step - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast() + .startCycleForUnitTests(); keysToAdd = RowSetFactory.fromKeys(); keysToRemove = RowSetFactory.fromKeys(1); keysToModify = RowSetFactory.fromKeys(); TstUtils.removeRows(table2, keysToRemove); table2.notifyListeners(keysToAdd, keysToRemove, keysToModify); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TableTools.show(table2); TstUtils.validate("ungroupRebase remove", en); // now we want to add it back, causing a rebase, 
and modify another - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); keysToAdd = RowSetFactory.fromKeys(1); keysToRemove = RowSetFactory.fromKeys(); keysToModify = RowSetFactory.fromKeys(2, 3); @@ -106,29 +111,29 @@ public Table e() { TstUtils.addToTable(table2, keysToAdd, keyAdditions, valueAdditions); TstUtils.addToTable(table2, keysToModify, keyModifications, valueModifications); table2.notifyListeners(keysToAdd, keysToRemove, keysToModify); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TableTools.show(table2); TstUtils.validate("ungroupRebase add rebase", en); // an empty step - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); keysToAdd = RowSetFactory.fromKeys(); keysToRemove = RowSetFactory.fromKeys(); keysToModify = RowSetFactory.fromKeys(); TstUtils.addToTable(table2, keysToModify, intCol("X"), col("Y")); table2.notifyListeners(keysToAdd, keysToRemove, keysToModify); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TableTools.show(table2); TstUtils.validate("ungroupRebase add post rebase", en); // and another step, to make sure everything is fine post rebase - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); keysToAdd = RowSetFactory.fromKeys(); keysToRemove = RowSetFactory.fromKeys(); keysToModify = RowSetFactory.fromKeys(2, 3); TstUtils.addToTable(table2, keysToModify, keyModifications, valueModifications); table2.notifyListeners(keysToAdd, keysToRemove, keysToModify); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); TableTools.show(table2); TstUtils.validate("ungroupRebase add post rebase 2", en); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/TickSuppressorTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/TickSuppressorTest.java index df34ad9e578..745da3945dd 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/TickSuppressorTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/TickSuppressorTest.java @@ -3,22 +3,20 @@ */ package io.deephaven.engine.table.impl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.QueryTableTestBase; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.BooleanGenerator; -import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.util.TickSuppressor; import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.test.types.OutOfBandTest; import java.util.Random; + import org.junit.experimental.categories.Category; import static io.deephaven.engine.util.TableTools.intCol; @@ -136,11 +134,12 @@ public void testRemoveSpuriousModifications() { assertEquals(0, listener.getCount()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> input.notifyListeners(i(), i(), i(5))); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> input.notifyListeners(i(), i(), i(5))); 
assertEquals(0, listener.getCount()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(input, i(2, 5), intCol("SentinelA", 2, 5), intCol("SentinelB", 8, 11)); input.notifyListeners(i(2), i(), i(5)); }); @@ -153,7 +152,7 @@ public void testRemoveSpuriousModifications() { assertFalse(listener.update.modifiedColumnSet().containsAny(suppressed.newModifiedColumnSet("SentinelA"))); assertTrue(listener.update.modifiedColumnSet().containsAny(suppressed.newModifiedColumnSet("SentinelB"))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(input, i(10, 15), intCol("SentinelA", 12, 15), intCol("SentinelB", 30, 40)); removeRows(input, i(5)); input.notifyListeners(i(), i(5), i(10, 15)); @@ -167,7 +166,7 @@ public void testRemoveSpuriousModifications() { assertTrue(listener.update.modifiedColumnSet().containsAny(suppressed.newModifiedColumnSet("SentinelA"))); assertFalse(listener.update.modifiedColumnSet().containsAny(suppressed.newModifiedColumnSet("SentinelB"))); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(input, i(20), intCol("SentinelA", 20), intCol("SentinelB", 50)); input.notifyListeners(i(20), i(), i()); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/by/TestSortedFirstOrLastByFactory.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/by/TestSortedFirstOrLastByFactory.java index a7096f2e672..26da0a328dc 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/by/TestSortedFirstOrLastByFactory.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/by/TestSortedFirstOrLastByFactory.java @@ -5,17 +5,14 @@ import io.deephaven.api.agg.spec.AggSpec; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import 
io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.TableUpdateImpl; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.generator.SortedLongGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.SortedBy; import io.deephaven.engine.table.impl.*; import io.deephaven.engine.rowset.RowSetFactory; @@ -127,22 +124,23 @@ public void testIds6445() { // this part is the original bug, if we didn't change the actual value of the row redirection; because the // shift modify combination left it at the same row key; we would not notice the mdoification - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final TableUpdateImpl update = new TableUpdateImpl(); - update.added = RowSetFactory.fromKeys(0); - update.removed = RowSetFactory.empty(); - update.modified = RowSetFactory.fromKeys(2, 4); - update.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); - update.modifiedColumnSet().clear(); - update.modifiedColumnSet().setAll("SFB"); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + final TableUpdateImpl update5 = new TableUpdateImpl(); + update5.added = RowSetFactory.fromKeys(0); + update5.removed = RowSetFactory.empty(); + update5.modified = RowSetFactory.fromKeys(2, 4); + update5.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); + update5.modifiedColumnSet().clear(); + update5.modifiedColumnSet().setAll("SFB"); addToTable(source, RowSetFactory.flat(6), 
intCol("SFB", 3, 2, 3, 2, 3, 2), intCol("Sentinel", 6, 1, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A", "A")); - final RowSetShiftData.Builder sb = new RowSetShiftData.Builder(); - sb.shiftRange(0, 4, 1); - update.shifted = sb.build(); - source.notifyListeners(update); + final RowSetShiftData.Builder sb5 = new RowSetShiftData.Builder(); + sb5.shiftRange(0, 4, 1); + update5.shifted = sb5.build(); + source.notifyListeners(update5); }); System.out.println("Updated SFB"); @@ -152,22 +150,22 @@ public void testIds6445() { // i'm concerned that if we really modify a row, but we don't detect it in the shift, so here we are just // shifting without modifications - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final TableUpdateImpl update = new TableUpdateImpl(); - update.added = RowSetFactory.fromKeys(0); - update.removed = RowSetFactory.empty(); - update.modified = RowSetFactory.fromKeys(); - update.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); - update.modifiedColumnSet().clear(); - update.modifiedColumnSet().setAll("SFB"); + updateGraph.runWithinUnitTestCycle(() -> { + final TableUpdateImpl update4 = new TableUpdateImpl(); + update4.added = RowSetFactory.fromKeys(0); + update4.removed = RowSetFactory.empty(); + update4.modified = RowSetFactory.fromKeys(); + update4.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); + update4.modifiedColumnSet().clear(); + update4.modifiedColumnSet().setAll("SFB"); addToTable(source, RowSetFactory.flat(7), intCol("SFB", 4, 3, 2, 3, 2, 3, 2), intCol("Sentinel", 7, 6, 1, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A", "A", "A")); - final RowSetShiftData.Builder sb = new RowSetShiftData.Builder(); - sb.shiftRange(0, 5, 1); - update.shifted = sb.build(); - source.notifyListeners(update); + final RowSetShiftData.Builder sb4 = new RowSetShiftData.Builder(); + sb4.shiftRange(0, 5, 1); + update4.shifted = sb4.build(); + source.notifyListeners(update4); }); System.out.println("Shifted 
SFB"); @@ -178,23 +176,23 @@ public void testIds6445() { TestCase.assertEquals(1, DataAccessHelpers.getColumn(bucketed, "Sentinel").get(0)); // here we are shifting, but not modifying the SFB column (but are modifying sentinel) - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final TableUpdateImpl update = new TableUpdateImpl(); - update.added = RowSetFactory.fromKeys(0); - update.removed = RowSetFactory.empty(); - update.modified = RowSetFactory.fromKeys(3); - update.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); - update.modifiedColumnSet().clear(); - update.modifiedColumnSet().setAll("Sentinel"); + updateGraph.runWithinUnitTestCycle(() -> { + final TableUpdateImpl update3 = new TableUpdateImpl(); + update3.added = RowSetFactory.fromKeys(0); + update3.removed = RowSetFactory.empty(); + update3.modified = RowSetFactory.fromKeys(3); + update3.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); + update3.modifiedColumnSet().clear(); + update3.modifiedColumnSet().setAll("Sentinel"); addToTable(source, RowSetFactory.flat(8), intCol("SFB", 4, 4, 3, 2, 3, 2, 3, 2), intCol("Sentinel", 8, 7, 6, 9, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A")); - final RowSetShiftData.Builder sb = new RowSetShiftData.Builder(); - sb.shiftRange(0, 6, 1); - update.shifted = sb.build(); - source.notifyListeners(update); + final RowSetShiftData.Builder sb3 = new RowSetShiftData.Builder(); + sb3.shiftRange(0, 6, 1); + update3.shifted = sb3.build(); + source.notifyListeners(update3); }); System.out.println("Shifted and Modified SFB"); @@ -205,23 +203,23 @@ public void testIds6445() { TestCase.assertEquals(9, DataAccessHelpers.getColumn(bucketed, "Sentinel").get(0)); // we are shifting, and claiming to modify SFB but not actually doing it - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final TableUpdateImpl update = new TableUpdateImpl(); - update.added = RowSetFactory.fromKeys(0); - update.removed = 
RowSetFactory.empty(); - update.modified = RowSetFactory.fromKeys(4); - update.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); - update.modifiedColumnSet().clear(); - update.modifiedColumnSet().setAll("SFB"); + updateGraph.runWithinUnitTestCycle(() -> { + final TableUpdateImpl update2 = new TableUpdateImpl(); + update2.added = RowSetFactory.fromKeys(0); + update2.removed = RowSetFactory.empty(); + update2.modified = RowSetFactory.fromKeys(4); + update2.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); + update2.modifiedColumnSet().clear(); + update2.modifiedColumnSet().setAll("SFB"); addToTable(source, RowSetFactory.flat(9), intCol("SFB", 4, 4, 4, 3, 2, 3, 2, 3, 2), intCol("Sentinel", 10, 8, 7, 6, 9, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A")); - final RowSetShiftData.Builder sb = new RowSetShiftData.Builder(); - sb.shiftRange(0, 7, 1); - update.shifted = sb.build(); - source.notifyListeners(update); + final RowSetShiftData.Builder sb2 = new RowSetShiftData.Builder(); + sb2.shiftRange(0, 7, 1); + update2.shifted = sb2.build(); + source.notifyListeners(update2); }); System.out.println("Shifted and Modified SFB"); @@ -232,23 +230,23 @@ public void testIds6445() { TestCase.assertEquals(9, DataAccessHelpers.getColumn(bucketed, "Sentinel").get(0)); // here we are shifting, and modifying SFB but not actually doing it - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final TableUpdateImpl update = new TableUpdateImpl(); - update.added = RowSetFactory.fromKeys(0); - update.removed = RowSetFactory.empty(); - update.modified = RowSetFactory.fromKeys(4); - update.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); - update.modifiedColumnSet().clear(); - update.modifiedColumnSet().setAll("SFB"); + updateGraph.runWithinUnitTestCycle(() -> { + final TableUpdateImpl update1 = new TableUpdateImpl(); + update1.added = RowSetFactory.fromKeys(0); + update1.removed = RowSetFactory.empty(); + 
update1.modified = RowSetFactory.fromKeys(4); + update1.modifiedColumnSet = source.getModifiedColumnSetForUpdates(); + update1.modifiedColumnSet().clear(); + update1.modifiedColumnSet().setAll("SFB"); addToTable(source, RowSetFactory.flat(10), intCol("SFB", 4, 4, 4, 4, 1, 2, 3, 2, 3, 2), intCol("Sentinel", 11, 10, 8, 7, 6, 9, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A")); - final RowSetShiftData.Builder sb = new RowSetShiftData.Builder(); - sb.shiftRange(0, 8, 1); - update.shifted = sb.build(); - source.notifyListeners(update); + final RowSetShiftData.Builder sb1 = new RowSetShiftData.Builder(); + sb1.shiftRange(0, 8, 1); + update1.shifted = sb1.build(); + source.notifyListeners(update1); }); System.out.println("Shifted and Really Really Modified SFB"); @@ -259,7 +257,7 @@ public void testIds6445() { TestCase.assertEquals(6, DataAccessHelpers.getColumn(bucketed, "Sentinel").get(0)); // claim to modify sfb, but don't really. Actually modify sentinel. - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final TableUpdateImpl update = new TableUpdateImpl(); update.added = RowSetFactory.fromKeys(0); update.removed = RowSetFactory.empty(); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/indexer/TestRowSetIndexer.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/indexer/TestRowSetIndexer.java index 1ec2628fd99..1c2a7d3cad7 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/indexer/TestRowSetIndexer.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/indexer/TestRowSetIndexer.java @@ -5,6 +5,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.base.verify.Require; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.*; import io.deephaven.engine.testutil.ColumnInfo; @@ -13,7 +14,6 @@ import 
io.deephaven.engine.testutil.generator.SetGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.TupleSource; @@ -85,71 +85,62 @@ private void testGrouping(final boolean immutableColumns, final Random random, f addGroupingValidator(queryTable, "queryTable"); final EvalNugget[] en = new EvalNugget[] { - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.head(0)); - } - }, - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.head(1)); - } - }, - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.update("intCol2 = intCol + 1")); - } - }, - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.update("intCol2 = intCol + 1").select()); - } - }, - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.view("Sym", "intCol2 = intCol + 1")); - } - }, - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.avgBy("Sym").sort("Sym")); - } - }, - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable - .groupBy("Sym", "intCol").sort("Sym", "intCol").view("doubleCol=max(doubleCol)")); - } - }, - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable - .avgBy("Sym", "doubleCol").sort("Sym", "doubleCol").view("intCol=min(intCol)")); - } - }, + 
EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.head(0)); + }), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.head(1)); + }), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.update("intCol2 = intCol + 1")); + }), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.update("intCol2 = intCol + 1").select()); + }), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.view("Sym", "intCol2 = intCol + 1")); + }), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.avgBy("Sym").sort("Sym")); + }), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.groupBy("Sym", "intCol") + .sort("Sym", "intCol") + .view("doubleCol=max(doubleCol)")); + }), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.avgBy("Sym", "doubleCol") + .sort("Sym", "doubleCol") + .view("intCol=min(intCol)")); + }), }; for (int ii = 0; ii < en.length; ++ii) { addGroupingValidator(en[ii].originalValue, "en[" + ii + "]"); } - Table by = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym")); + Table by = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.avgBy("Sym")); addGroupingValidator(by, "groupBy"); - Table avgBy = UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym")); + Table avgBy = 
ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.avgBy("Sym")); addGroupingValidator(avgBy, "avgBy"); - Table avgBy1 = - UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym", "intCol")); + Table avgBy1 = ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> queryTable.avgBy("Sym", "intCol")); addGroupingValidator(avgBy1, "avgBy1"); - Table merged = Require.neqNull( - UpdateGraphProcessor.DEFAULT.exclusiveLock().computeLocked(() -> TableTools.merge(queryTable)), - "TableTools.merge(queryTable)"); + Table merged = Require.neqNull(ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> TableTools.merge(queryTable)), "TableTools.merge(queryTable)"); addGroupingValidator(merged, "merged"); - Table updated = UpdateGraphProcessor.DEFAULT.exclusiveLock() + Table updated = ExecutionContext.getContext().getUpdateGraph().exclusiveLock() .computeLocked(() -> merged.update("HiLo = intCol > 50 ? 
`Hi` : `Lo`")); addGroupingValidator(updated, "updated"); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestKeyValuePartitionLayout.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestKeyValuePartitionLayout.java index e61513da568..c413dd25dfe 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestKeyValuePartitionLayout.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/locations/impl/TestKeyValuePartitionLayout.java @@ -6,10 +6,12 @@ import io.deephaven.base.FileUtils; import io.deephaven.engine.table.impl.locations.TableDataException; import io.deephaven.engine.table.impl.locations.local.FileTableLocationKey; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.parquet.table.layout.KeyValuePartitionLayout; import junit.framework.TestCase; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.io.File; @@ -25,6 +27,9 @@ @SuppressWarnings("ResultOfMethodCallIgnored") public class TestKeyValuePartitionLayout { + @Rule + final public EngineCleanup framework = new EngineCleanup(); + private File dataDirectory; @Before diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/remote/TestConstructSnapshot.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/remote/TestConstructSnapshot.java index 7ba215a7f4e..424eb833fa9 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/remote/TestConstructSnapshot.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/remote/TestConstructSnapshot.java @@ -4,12 +4,15 @@ package io.deephaven.engine.table.impl.remote; import io.deephaven.base.SleepUtil; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import 
io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.util.SafeCloseable; import org.apache.commons.lang3.mutable.MutableLong; public class TestConstructSnapshot extends RefreshingTableTestCase { + public void testClockChange() throws InterruptedException { final MutableLong changed = new MutableLong(0); final ConstructSnapshot.SnapshotControl control = new ConstructSnapshot.SnapshotControl() { @@ -24,29 +27,33 @@ public boolean snapshotConsistent(final long currentClockValue, final boolean us return true; } }; - Runnable snapshot_test = - () -> ConstructSnapshot.callDataSnapshotFunction("snapshot test", control, (usePrev, beforeClock) -> { + final ExecutionContext executionContext = ExecutionContext.getContext(); + final Runnable snapshot_test = () -> { + try (final SafeCloseable ignored = executionContext.open()) { + ConstructSnapshot.callDataSnapshotFunction("snapshot test", control, (usePrev, beforeClock) -> { SleepUtil.sleep(1000); if (ConstructSnapshot.concurrentAttemptInconsistent()) { changed.increment(); } return true; }); + } + }; changed.setValue(0); final Thread t = new Thread(snapshot_test); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); t.start(); t.join(); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); assertEquals(0, changed.longValue()); changed.setValue(0); final Thread t2 = new Thread(snapshot_test); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); t2.start(); SleepUtil.sleep(100); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); t2.join(); assertEquals(1, changed.longValue()); } diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestClockFilters.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestClockFilters.java index 2fb4801ab78..9bdbeb17528 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestClockFilters.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestClockFilters.java @@ -3,28 +3,40 @@ */ package io.deephaven.engine.table.impl.select; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.table.impl.DataAccessHelpers; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import static io.deephaven.engine.util.TableTools.col; import static io.deephaven.engine.util.TableTools.intCol; import static io.deephaven.engine.util.TableTools.merge; import static io.deephaven.engine.util.TableTools.newTable; import static io.deephaven.time.DateTimeUtils.epochNanosToInstant; +import static org.junit.Assert.assertArrayEquals; import io.deephaven.engine.testutil.StepClock; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; /** * Test for Sorted and Unsorted ClockFilter implementations. 
*/ -public class TestClockFilters extends RefreshingTableTestCase { +public class TestClockFilters { - private final Table testInput1; - private final Table testInput2; - private final Table testInput3; - { + private Table testInput1; + private Table testInput2; + private Table testInput3; + + private StepClock clock; + + @Rule + public final EngineCleanup base = new EngineCleanup(); + + @Before + public void setUp() { final Table testInputRangeA = newTable( col("Timestamp", epochNanosToInstant(1000L), epochNanosToInstant(2000L), epochNanosToInstant(3000L), epochNanosToInstant(1000L), epochNanosToInstant(2000L), epochNanosToInstant(3000L)), @@ -36,139 +48,150 @@ public class TestClockFilters extends RefreshingTableTestCase { intCol("Int", 2, 2, 3, 2, 2, 3)); testInput2 = merge(testInputRangeA, testInputRangeB, testInputRangeA); testInput3 = merge(testInputRangeA, testInputRangeB, testInputRangeB); + clock = new StepClock(1000L, 2000L, 3000L); } - private final StepClock clock = new StepClock(1000L, 2000L, 3000L); - + @Test public void testSorted1() { clock.reset(); final SortedClockFilter filter = new SortedClockFilter("Timestamp", clock, true); final Table result = testInput1.sort("Timestamp").where(filter); - assertEquals(new int[] {1, 1, 1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); + assertArrayEquals(new int[] {1, 1, 1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2}, + assertArrayEquals(new int[] {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { 
clock.run(); filter.run(); }); - assertEquals(new int[] {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, + assertArrayEquals(new int[] {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); } + @Test public void testUnsorted1() { clock.reset(); final UnsortedClockFilter filter = new UnsortedClockFilter("Timestamp", clock, true); final Table result = testInput1.where(filter); - assertEquals(new int[] {1, 1, 1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); + assertArrayEquals(new int[] {1, 1, 1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2}, + assertArrayEquals(new int[] {1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3}, + assertArrayEquals(new int[] {1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); } + @Test public void testSorted2() { clock.reset(); final SortedClockFilter filter = new SortedClockFilter("Timestamp", clock, true); final Table result = testInput2.sort("Timestamp").where(filter); - assertEquals(new int[] {1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); + assertArrayEquals(new int[] {1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = 
ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2}, + assertArrayEquals(new int[] {1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, + assertArrayEquals(new int[] {1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); } + @Test public void testUnsorted2() { clock.reset(); final UnsortedClockFilter filter = new UnsortedClockFilter("Timestamp", clock, true); final Table result = testInput2.where(filter); - assertEquals(new int[] {1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); + assertArrayEquals(new int[] {1, 1, 1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2}, + assertArrayEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 3, 1, 2, 3, 1, 2, 3}, + assertArrayEquals(new int[] {1, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 3, 1, 2, 3, 1, 2, 3}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); } + @Test public void testSorted3() { clock.reset(); final SortedClockFilter filter = new 
SortedClockFilter("Timestamp", clock, true); final Table result = testInput3.sort("Timestamp").where(filter); - assertEquals(new int[] {1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); + assertArrayEquals(new int[] {1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2}, + assertArrayEquals(new int[] {1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, + assertArrayEquals(new int[] {1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); } + @Test public void testUnsorted3() { clock.reset(); final UnsortedClockFilter filter = new UnsortedClockFilter("Timestamp", clock, true); final Table result = testInput3.where(filter); - assertEquals(new int[] {1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); + assertArrayEquals(new int[] {1, 1}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2}, + assertArrayEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { clock.run(); filter.run(); }); - assertEquals(new int[] {1, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 3}, + assertArrayEquals(new int[] {1, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 3}, (int[]) DataAccessHelpers.getColumn(result, "Int").getDirect()); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilterGeneration.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilterGeneration.java index 4033e4956ab..7c575c67d90 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilterGeneration.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilterGeneration.java @@ -29,6 +29,7 @@ public void setUp() { .newQueryLibrary("DEFAULT") .captureQueryCompiler() .captureQueryScope() + .captureUpdateGraph() .build().open(); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConstantFormulaEvaluation.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConstantFormulaEvaluation.java index 977b5fda87e..c3db84fd0bd 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConstantFormulaEvaluation.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestConstantFormulaEvaluation.java @@ -1,6 +1,7 @@ package io.deephaven.engine.table.impl.select; import com.github.javaparser.ast.expr.Expression; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.Table; @@ -11,7 +12,6 @@ import io.deephaven.engine.table.impl.sources.SingleValueColumnSource; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.junit4.EngineCleanup; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; 
import org.junit.Assert; import org.junit.Rule; @@ -360,15 +360,15 @@ public void testRefreshingTableForConstantFormulaColumnSource() { final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6).toTracking(), col("x", 1, 2, 3), col("y", 'a', 'b', 'c')); final String[] formulas = new String[] {"x = x * 2", "z = y", "u=7"}; - QueryTable table2 = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( + QueryTable table2 = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( () -> (QueryTable) table.select(formulas)); Set expectedConstValueColumns = Collections.singleton("u"); Integer[] expectedConstValues = new Integer[] {7}; checkConstantFormula(table2, expectedConstValueColumns, expectedConstValues, int.class); final String[] formulas2 = new String[] {"x = x * 2", "z1 = z", "u1=u", "u2=u1 * 2"}; - QueryTable table3 = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> (QueryTable) table2.select(formulas2)); + QueryTable table3 = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> (QueryTable) table2.select(formulas2)); Set expectedConstValueColumns2 = Collections.singleton("u1"); Integer[] expectedConstValues2 = new Integer[] {7}; // verify parent constant value ColumnSource is same when inherited as is diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumn.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumn.java index f5a5f57f320..e784ed72dc8 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumn.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumn.java @@ -45,15 +45,13 @@ public static Collection data() { return Arrays.asList(new Object[] {false}, new Object[] {true}); } - private final Table testDataTable; - private final Map> availableColumns; + private Table testDataTable; + private Map> availableColumns; private final boolean 
useKernelFormulas; private boolean kernelFormulasSavedValue; public TestFormulaColumn(boolean useKernelFormulas) { this.useKernelFormulas = useKernelFormulas; - testDataTable = getTestDataTable(); - availableColumns = testDataTable.getDefinition().getColumnNameMap(); } @Rule @@ -61,6 +59,9 @@ public TestFormulaColumn(boolean useKernelFormulas) { @Before public void setUp() throws Exception { + testDataTable = getTestDataTable(); + availableColumns = testDataTable.getDefinition().getColumnNameMap(); + kernelFormulasSavedValue = DhFormulaColumn.useKernelFormulasProperty; DhFormulaColumn.useKernelFormulasProperty = useKernelFormulas; diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumnGeneration.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumnGeneration.java index 8c0d770368f..57b3c9080ce 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumnGeneration.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestFormulaColumnGeneration.java @@ -49,6 +49,7 @@ public void setUp() { .newQueryLibrary("DEFAULT") .captureQueryCompiler() .captureQueryScope() + .captureUpdateGraph() .build().open(); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestSimulationClock.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestSimulationClock.java index d5d91712f7f..58a0497a264 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestSimulationClock.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestSimulationClock.java @@ -3,8 +3,9 @@ */ package io.deephaven.engine.table.impl.select; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import 
io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import java.time.Instant; @@ -18,8 +19,9 @@ public void testSignal() { final Instant start = DateTimeUtils.now(); final SimulationClock clock = new SimulationClock(start, start.plusNanos(1), 1); clock.start(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ci = 0; ci < 2; ++ci) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(clock::advance); + updateGraph.runWithinUnitTestCycle(clock::advance); } clock.awaitDoneUninterruptibly(); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestWhereFilterFactory.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestWhereFilterFactory.java index dc479d478a6..687183564ca 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestWhereFilterFactory.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/select/TestWhereFilterFactory.java @@ -3,20 +3,16 @@ */ package io.deephaven.engine.table.impl.select; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.rowset.RowSet; -import io.deephaven.util.SafeCloseable; -import junit.framework.TestCase; import java.util.ArrayList; import java.util.List; -public class TestWhereFilterFactory extends TestCase { - - private SafeCloseable executionContext; +public class TestWhereFilterFactory extends RefreshingTableTestCase { private static final String STRING_COLUMN = "Strings"; private static final String INTEGER_COLUMN = "Integers"; @@ -36,9 +32,8 @@ public class TestWhereFilterFactory extends TestCase { private Table table; @Override - protected void setUp() throws Exception { + public void setUp() throws Exception { super.setUp(); - 
executionContext = TestExecutionContext.createForUnitTests().open(); table = TableTools.newTable( TableTools.col(STRING_COLUMN, NORMAL_STRING, NEEDS_ESCAPE, NO_COMMAS_A, NO_COMMAS_B, WITH_COMMAS_A, WITH_COMMAS_B), @@ -47,12 +42,6 @@ protected void setUp() throws Exception { TableTools.col(BOOLEAN_COLUMN, true, false, true, false, true, false)); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } - public void testColumnNameInValueNormal() { String value = runSimpleFilterExpresion(" in ", NORMAL_STRING); assertEquals(NORMAL_STRING, value); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java index 3530d9d036b..f3f400a2813 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractBooleanColumnSourceTest.java @@ -16,6 +16,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -25,10 +26,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -38,6 +39,10 @@ import static junit.framework.TestCase.*; public abstract class 
AbstractBooleanColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -45,17 +50,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -68,47 +62,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_BOOLEAN, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_BOOLEAN, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final byte [] expectations = new byte[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, BooleanUtils.NULL_BOOLEAN_AS_BYTE); - // endregion arrayFill - final byte [] randomBooleans = ArrayGenerator.randomBooleans(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final byte randomBoolean = randomBooleans[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = 
randomBoolean; - source.set(ii, randomBoolean); + final int expectedBlockSize = 1024; + final byte[] expectations = new byte[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, BooleanUtils.NULL_BOOLEAN_AS_BYTE); + // endregion arrayFill + final byte[] randomBooleans = ArrayGenerator.randomBooleans(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final byte randomBoolean = randomBooleans[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomBoolean; + source.set(ii, randomBoolean); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + 
checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -309,17 +302,17 @@ public void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. 
@Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final BooleanSparseArraySource src = new BooleanSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in BooleanSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -331,40 +324,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_BOOLEAN, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_BOOLEAN, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final byte [] expectations = new byte[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, BooleanUtils.NULL_BOOLEAN_AS_BYTE); - // endregion arrayFill - final byte [] randomBooleans = ArrayGenerator.randomBooleans(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int 
block = ii / expectedBlockSize; - if (block % 2 == 0) { - final byte randomBoolean = randomBooleans[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomBoolean; - source.set(ii, randomBoolean); + final int expectedBlockSize = 1024; + final byte[] expectations = new byte[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, BooleanUtils.NULL_BOOLEAN_AS_BYTE); + // endregion arrayFill + final byte[] randomBooleans = ArrayGenerator.randomBooleans(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final byte randomBoolean = randomBooleans[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomBoolean; + source.set(ii, randomBoolean); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java index 
7af7589d9f8..f3d62225f47 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractByteColumnSourceTest.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,10 +20,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -32,6 +33,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractByteColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -39,17 +44,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -62,47 +56,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableByteChunk dest = 
WritableByteChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableByteChunk dest = WritableByteChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_BYTE, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_BYTE, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final byte [] expectations = new byte[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_BYTE); - // endregion arrayFill - final byte [] randomBytes = ArrayGenerator.randomBytes(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final byte randomByte = randomBytes[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomByte; - source.set(ii, randomByte); + final int expectedBlockSize = 1024; + final byte[] expectations = new byte[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_BYTE); + // endregion arrayFill + final byte[] randomBytes = ArrayGenerator.randomBytes(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final byte randomByte = randomBytes[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomByte; + source.set(ii, randomByte); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, 
dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -308,17 +301,17 @@ public void testSourceSink() { // null reference 
exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final ByteSparseArraySource src = new ByteSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableByteChunk chunk = WritableByteChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in ByteSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -330,40 +323,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableByteChunk dest = WritableByteChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableByteChunk dest = WritableByteChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_BYTE, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_BYTE, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final byte [] expectations = new byte[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_BYTE); - // endregion arrayFill - final byte [] randomBytes = ArrayGenerator.randomBytes(random, expectations.length / 2); - for (int ii 
= 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final byte randomByte = randomBytes[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomByte; - source.set(ii, randomByte); + final int expectedBlockSize = 1024; + final byte[] expectations = new byte[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_BYTE); + // endregion arrayFill + final byte[] randomBytes = ArrayGenerator.randomBytes(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final byte randomByte = randomBytes[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomByte; + source.set(ii, randomByte); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java 
index e3a116567c6..ef23f6c739c 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractCharacterColumnSourceTest.java @@ -5,6 +5,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -14,10 +15,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -27,6 +28,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractCharacterColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -34,17 +39,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -57,47 +51,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final 
WritableCharChunk dest = WritableCharChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableCharChunk dest = WritableCharChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_CHAR, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_CHAR, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final char [] expectations = new char[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_CHAR); - // endregion arrayFill - final char [] randomChars = ArrayGenerator.randomChars(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final char randomChar = randomChars[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomChar; - source.set(ii, randomChar); + final int expectedBlockSize = 1024; + final char[] expectations = new char[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_CHAR); + // endregion arrayFill + final char[] randomChars = ArrayGenerator.randomChars(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final char randomChar = randomChars[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomChar; + source.set(ii, randomChar); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - 
checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -303,17 +296,17 @@ public 
void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final CharacterSparseArraySource src = new CharacterSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableCharChunk chunk = WritableCharChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in CharacterSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -325,40 +318,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableCharChunk dest = WritableCharChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableCharChunk dest = WritableCharChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_CHAR, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_CHAR, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final char [] expectations = new char[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_CHAR); - // endregion arrayFill - final char [] randomChars = 
ArrayGenerator.randomChars(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final char randomChar = randomChars[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomChar; - source.set(ii, randomChar); + final int expectedBlockSize = 1024; + final char[] expectations = new char[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_CHAR); + // endregion arrayFill + final char[] randomChars = ArrayGenerator.randomChars(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final char randomChar = randomChars[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomChar; + source.set(ii, randomChar); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java index b0ed0fcf41c..8554f1c35b9 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractDoubleColumnSourceTest.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,10 +20,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -32,6 +33,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractDoubleColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -39,17 +44,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -62,47 +56,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = 
makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableDoubleChunk dest = WritableDoubleChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableDoubleChunk dest = WritableDoubleChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_DOUBLE, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_DOUBLE, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final double [] expectations = new double[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_DOUBLE); - // endregion arrayFill - final double [] randomDoubles = ArrayGenerator.randomDoubles(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final double randomDouble = randomDoubles[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomDouble; - source.set(ii, randomDouble); + final int expectedBlockSize = 1024; + final double[] expectations = new double[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_DOUBLE); + // endregion arrayFill + final double[] randomDoubles = ArrayGenerator.randomDoubles(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final double randomDouble = randomDoubles[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomDouble; + source.set(ii, randomDouble); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev 
: new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + 
checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -308,17 +301,17 @@ public void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final DoubleSparseArraySource src = new DoubleSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableDoubleChunk chunk = WritableDoubleChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in DoubleSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -330,40 +323,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableDoubleChunk dest = WritableDoubleChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableDoubleChunk dest = WritableDoubleChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_DOUBLE, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_DOUBLE, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final double [] 
expectations = new double[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_DOUBLE); - // endregion arrayFill - final double [] randomDoubles = ArrayGenerator.randomDoubles(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final double randomDouble = randomDoubles[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomDouble; - source.set(ii, randomDouble); + final int expectedBlockSize = 1024; + final double[] expectations = new double[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_DOUBLE); + // endregion arrayFill + final double[] randomDoubles = ArrayGenerator.randomDoubles(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final double randomDouble = randomDoubles[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomDouble; + source.set(ii, randomDouble); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, 
usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java index 2718508c28f..d8b403ab32e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractFloatColumnSourceTest.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,10 +20,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -32,6 +33,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractFloatColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -39,17 +44,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new 
Random(0); @@ -62,47 +56,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableFloatChunk dest = WritableFloatChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableFloatChunk dest = WritableFloatChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_FLOAT, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_FLOAT, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final float [] expectations = new float[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_FLOAT); - // endregion arrayFill - final float [] randomFloats = ArrayGenerator.randomFloats(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final float randomFloat = randomFloats[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomFloat; - source.set(ii, randomFloat); + final int expectedBlockSize = 1024; + final float[] expectations = new float[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_FLOAT); + // endregion arrayFill + final float[] randomFloats = ArrayGenerator.randomFloats(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final float randomFloat = randomFloats[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomFloat; + source.set(ii, 
randomFloat); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed 
= 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -308,17 +301,17 @@ public void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final FloatSparseArraySource src = new FloatSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableFloatChunk chunk = WritableFloatChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in FloatSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -330,40 +323,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableFloatChunk dest = WritableFloatChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableFloatChunk dest = WritableFloatChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_FLOAT, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null 
check: " + ii, NULL_FLOAT, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final float [] expectations = new float[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_FLOAT); - // endregion arrayFill - final float [] randomFloats = ArrayGenerator.randomFloats(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final float randomFloat = randomFloats[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomFloat; - source.set(ii, randomFloat); + final int expectedBlockSize = 1024; + final float[] expectations = new float[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_FLOAT); + // endregion arrayFill + final float[] randomFloats = ArrayGenerator.randomFloats(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final float randomFloat = randomFloats[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomFloat; + source.set(ii, randomFloat); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) 
{ + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java index 70010333460..8b9cb84987d 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractIntegerColumnSourceTest.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,10 +20,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -32,6 +33,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractIntegerColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -39,17 +44,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - 
UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -62,47 +56,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableIntChunk dest = WritableIntChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableIntChunk dest = WritableIntChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_INT, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_INT, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final int [] expectations = new int[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_INT); - // endregion arrayFill - final int [] randomInts = ArrayGenerator.randomInts(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final int randomInt = randomInts[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomInt; - source.set(ii, randomInt); + final int expectedBlockSize = 1024; + final int[] expectations = new int[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_INT); + // endregion arrayFill + final int[] randomInts = ArrayGenerator.randomInts(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final int randomInt = randomInts[(block / 2 * expectedBlockSize) + (ii % 
expectedBlockSize)]; + expectations[ii] = randomInt; + source.set(ii, randomInt); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 
251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -308,17 +301,17 @@ public void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final IntegerSparseArraySource src = new IntegerSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableIntChunk chunk = WritableIntChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in IntegerSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -330,40 +323,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableIntChunk dest = WritableIntChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableIntChunk dest = WritableIntChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_INT, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + 
for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_INT, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final int [] expectations = new int[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_INT); - // endregion arrayFill - final int [] randomInts = ArrayGenerator.randomInts(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final int randomInt = randomInts[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomInt; - source.set(ii, randomInt); + final int expectedBlockSize = 1024; + final int[] expectations = new int[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_INT); + // endregion arrayFill + final int[] randomInts = ArrayGenerator.randomInts(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final int randomInt = randomInts[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomInt; + source.set(ii, randomInt); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, 
expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java index 19cff33b94a..64072184849 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractLongColumnSourceTest.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,10 +20,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -32,6 +33,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractLongColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -39,17 +44,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - 
UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -62,47 +56,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableLongChunk dest = WritableLongChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableLongChunk dest = WritableLongChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_LONG, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_LONG, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final long [] expectations = new long[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_LONG); - // endregion arrayFill - final long [] randomLongs = ArrayGenerator.randomLongs(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final long randomLong = randomLongs[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomLong; - source.set(ii, randomLong); + final int expectedBlockSize = 1024; + final long[] expectations = new long[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_LONG); + // endregion arrayFill + final long[] randomLongs = ArrayGenerator.randomLongs(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final long randomLong = randomLongs[(block / 2 * 
expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomLong; + source.set(ii, randomLong); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, 
fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -308,17 +301,17 @@ public void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final LongSparseArraySource src = new LongSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableLongChunk chunk = WritableLongChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in LongSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -330,40 +323,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableLongChunk dest = WritableLongChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableLongChunk dest = WritableLongChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_LONG, dest.get(ii)); - } + source.fillChunk(fillContext, dest, 
RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_LONG, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final long [] expectations = new long[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_LONG); - // endregion arrayFill - final long [] randomLongs = ArrayGenerator.randomLongs(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final long randomLong = randomLongs[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomLong; - source.set(ii, randomLong); + final int expectedBlockSize = 1024; + final long[] expectations = new long[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_LONG); + // endregion arrayFill + final long[] randomLongs = ArrayGenerator.randomLongs(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final long randomLong = randomLongs[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomLong; + source.set(ii, randomLong); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final 
WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java index af31855359f..ccad521206d 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractObjectColumnSourceTest.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,10 +20,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -31,6 +32,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractObjectColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -38,17 +43,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public 
void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -61,47 +55,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, null, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, null, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final Object [] expectations = new Object[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, null); - // endregion arrayFill - final Object [] randomObjects = ArrayGenerator.randomObjects(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final Object randomObject = randomObjects[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomObject; - source.set(ii, randomObject); + final int expectedBlockSize = 1024; + final Object[] expectations = new Object[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, null); + // endregion arrayFill + final Object[] randomObjects = ArrayGenerator.randomObjects(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final 
Object randomObject = randomObjects[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomObject; + source.set(ii, randomObject); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, 
usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -307,17 +300,17 @@ public void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final ObjectSparseArraySource src = new ObjectSparseArraySource<>(String.class); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in ObjectSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -329,40 +322,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableObjectChunk dest = WritableObjectChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " 
+ ii, null, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, null, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final Object [] expectations = new Object[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, null); - // endregion arrayFill - final Object [] randomObjects = ArrayGenerator.randomObjects(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final Object randomObject = randomObjects[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomObject; - source.set(ii, randomObject); + final int expectedBlockSize = 1024; + final Object[] expectations = new Object[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, null); + // endregion arrayFill + final Object[] randomObjects = ArrayGenerator.randomObjects(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final Object randomObject = randomObjects[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomObject; + source.set(ii, randomObject); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + // lets make a few random indices + for (int 
seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java index 2d68d46fe5e..8bb23e48da7 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/AbstractShortColumnSourceTest.java @@ -10,6 +10,7 @@ import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,10 +20,10 @@ import io.deephaven.engine.table.WritableColumnSource; import io.deephaven.engine.table.impl.DefaultGetContext; import io.deephaven.engine.table.impl.TestSourceSink; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import org.jetbrains.annotations.NotNull; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Arrays; @@ -32,6 +33,10 @@ import static junit.framework.TestCase.*; public abstract class AbstractShortColumnSourceTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @NotNull abstract WritableColumnSource makeTestSource(); @@ -39,17 +44,6 @@ int getSourceSize() { return 16384; } - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); 
- UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Test public void testFillChunk() { final Random random = new Random(0); @@ -62,47 +56,46 @@ public void testFillChunk() { private void testFill(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableShortChunk dest = WritableShortChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableShortChunk dest = WritableShortChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_SHORT, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_SHORT, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final short [] expectations = new short[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_SHORT); - // endregion arrayFill - final short [] randomShorts = ArrayGenerator.randomShorts(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final short randomShort = randomShorts[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomShort; - source.set(ii, randomShort); + final int expectedBlockSize = 1024; + final short[] expectations = new short[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_SHORT); + // endregion arrayFill + final short[] randomShorts = ArrayGenerator.randomShorts(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; 
++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final short randomShort = randomShorts[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomShort; + source.set(ii, randomShort); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); - checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); - - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); - checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, true}) { + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 0, expectations.length - 1, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, expectations.length - 100, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 200, expectations.length - 1124, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 700, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 100, 1024, usePrev); + 
checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 250, usePrev); + checkRangeFill(chunkSize, source, fillContext, dest, expectations, 250, 251, usePrev); + + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + final RowSet rowSet = generateIndex(random, expectations.length, 1 + random.nextInt(31)); + checkRandomFill(chunkSize, source, fillContext, dest, expectations, rowSet, usePrev); + } } } - - fillContext.close(); } @Test @@ -308,17 +301,17 @@ public void testSourceSink() { // null reference exception at commit time. The fix is to have the chunk methods bail out early if there is nothing // to do. @Test - public void testFilllEmptyChunkWithPrev() { + public void testFillEmptyChunkWithPrev() { final ShortSparseArraySource src = new ShortSparseArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableShortChunk chunk = WritableShortChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in ShortSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -330,40 +323,39 @@ public void testFillUnordered() { private void testFillUnordered(Random random, int chunkSize) { final WritableColumnSource source = makeTestSource(); - final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); - final WritableShortChunk dest = WritableShortChunk.makeWritableChunk(chunkSize); + try (final ColumnSource.FillContext fillContext = source.makeFillContext(chunkSize); + final WritableShortChunk dest = WritableShortChunk.makeWritableChunk(chunkSize)) { - source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); - for (int ii = 
0; ii < 1024; ++ii) { - checkFromSource("null check: " + ii, NULL_SHORT, dest.get(ii)); - } + source.fillChunk(fillContext, dest, RowSetFactory.fromRange(0, 1023)); + for (int ii = 0; ii < 1024; ++ii) { + checkFromSource("null check: " + ii, NULL_SHORT, dest.get(ii)); + } - final int expectedBlockSize = 1024; - final short [] expectations = new short[getSourceSize()]; - // region arrayFill - Arrays.fill(expectations, NULL_SHORT); - // endregion arrayFill - final short [] randomShorts = ArrayGenerator.randomShorts(random, expectations.length / 2); - for (int ii = 0; ii < expectations.length; ++ii) { - final int block = ii / expectedBlockSize; - if (block % 2 == 0) { - final short randomShort = randomShorts[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; - expectations[ii] = randomShort; - source.set(ii, randomShort); + final int expectedBlockSize = 1024; + final short[] expectations = new short[getSourceSize()]; + // region arrayFill + Arrays.fill(expectations, NULL_SHORT); + // endregion arrayFill + final short[] randomShorts = ArrayGenerator.randomShorts(random, expectations.length / 2); + for (int ii = 0; ii < expectations.length; ++ii) { + final int block = ii / expectedBlockSize; + if (block % 2 == 0) { + final short randomShort = randomShorts[(block / 2 * expectedBlockSize) + (ii % expectedBlockSize)]; + expectations[ii] = randomShort; + source.set(ii, randomShort); + } } - } - // before we have the previous tracking enabled, prev should just fall through to get - for (boolean usePrev : new boolean[]{false, true}) { - // lets make a few random indices - for (int seed = 0; seed < 100; ++seed) { - int count = random.nextInt(chunkSize); - try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { - checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + // before we have the previous tracking enabled, prev should just fall through to get + for (boolean usePrev : new boolean[]{false, 
true}) { + // lets make a few random indices + for (int seed = 0; seed < 100; ++seed) { + int count = random.nextInt(chunkSize); + try (final WritableLongChunk rowKeys = generateRandomKeys(random, count, expectations.length)) { + checkRandomFillUnordered(source, fillContext, dest, expectations, rowKeys, usePrev); + } } } } - - fillContext.close(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestBooleanArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestBooleanArraySource.java index f2fef00f50a..bb5ef0483d6 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestBooleanArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestBooleanArraySource.java @@ -14,6 +14,7 @@ import io.deephaven.util.BooleanUtils; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -21,12 +22,12 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -64,20 +65,20 @@ private void updateFromArray(BooleanArraySource dest, byte[] values) { private void testGetChunkGeneric(byte[] values, byte[] newValues, int chunkSize, RowSet rowSet) { final BooleanArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -264,20 +265,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, byte[] va private void testFillChunkGeneric(byte[] values, byte[] newValues, int chunkSize, RowSet rowSet) { final BooleanArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -526,14 +527,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final BooleanArraySource 
src = new BooleanArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in BooleanSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestByteArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestByteArraySource.java index 98eb0ee3ac5..de686cfa793 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestByteArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestByteArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -15,13 +16,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import 
org.junit.Rule; @@ -59,20 +60,20 @@ private void updateFromArray(ByteArraySource dest, byte[] values) { private void testGetChunkGeneric(byte[] values, byte[] newValues, int chunkSize, RowSet rowSet) { final ByteArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -266,20 +267,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, byte[] va private void testFillChunkGeneric(byte[] values, byte[] newValues, int chunkSize, RowSet rowSet) { final ByteArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } 
finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -528,14 +529,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final ByteArraySource src = new ByteArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableByteChunk chunk = WritableByteChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in ByteSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestCharacterArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestCharacterArraySource.java index f7155d51855..3af3137cca1 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestCharacterArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestCharacterArraySource.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -10,13 +11,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import 
io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -54,20 +55,20 @@ private void updateFromArray(CharacterArraySource dest, char[] values) { private void testGetChunkGeneric(char[] values, char[] newValues, int chunkSize, RowSet rowSet) { final CharacterArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -261,20 +262,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, char[] va private void testFillChunkGeneric(char[] values, char[] newValues, int chunkSize, RowSet rowSet) { final CharacterArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -523,14 +524,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final CharacterArraySource src = new CharacterArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableCharChunk chunk = WritableCharChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in CharacterSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestDoubleArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestDoubleArraySource.java index 52f749b53cb..0c39185fbd2 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestDoubleArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestDoubleArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -15,13 +16,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import 
io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -59,20 +60,20 @@ private void updateFromArray(DoubleArraySource dest, double[] values) { private void testGetChunkGeneric(double[] values, double[] newValues, int chunkSize, RowSet rowSet) { final DoubleArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -266,20 +267,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, double[] private void testFillChunkGeneric(double[] values, double[] newValues, int chunkSize, RowSet rowSet) { final DoubleArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -528,14 +529,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final DoubleArraySource src = new DoubleArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableDoubleChunk chunk = WritableDoubleChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in DoubleSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestFloatArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestFloatArraySource.java index 9a12efba90a..21860b0ca81 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestFloatArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestFloatArraySource.java @@ -8,6 +8,7 @@ */ 
package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -15,13 +16,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -59,20 +60,20 @@ private void updateFromArray(FloatArraySource dest, float[] values) { private void testGetChunkGeneric(float[] values, float[] newValues, int chunkSize, RowSet rowSet) { final FloatArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ 
-266,20 +267,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, float[] v private void testFillChunkGeneric(float[] values, float[] newValues, int chunkSize, RowSet rowSet) { final FloatArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -528,14 +529,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final FloatArraySource src = new FloatArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableFloatChunk chunk = WritableFloatChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in FloatSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestIntegerArraySource.java 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestIntegerArraySource.java index ca007b62898..7de2644dffe 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestIntegerArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestIntegerArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -15,13 +16,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -59,20 +60,20 @@ private void updateFromArray(IntegerArraySource dest, int[] values) { private void testGetChunkGeneric(int[] values, int[] newValues, int chunkSize, RowSet rowSet) { final IntegerArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -266,20 +267,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, int[] val private void testFillChunkGeneric(int[] values, int[] newValues, int chunkSize, RowSet rowSet) { final IntegerArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -528,14 +529,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final IntegerArraySource src = new IntegerArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableIntChunk chunk = WritableIntChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, 
chunk); } // NullPointerException in IntegerSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestLongArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestLongArraySource.java index ebf72330d3a..27aa059d851 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestLongArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestLongArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -15,13 +16,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -59,20 +60,20 @@ private void updateFromArray(LongArraySource dest, long[] values) { private void testGetChunkGeneric(long[] values, long[] newValues, int chunkSize, RowSet rowSet) { final LongArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = 
forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -266,20 +267,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, long[] va private void testFillChunkGeneric(long[] values, long[] newValues, int chunkSize, RowSet rowSet) { final LongArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -528,14 +529,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final LongArraySource src = new LongArraySource(); src.startTrackingPrevValues(); - 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableLongChunk chunk = WritableLongChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in LongSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestObjectArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestObjectArraySource.java index 176fcab7325..bf60bfc9c60 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestObjectArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestObjectArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -15,13 +16,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -58,20 +59,20 @@ private void 
updateFromArray(ObjectArraySource dest, Object[] values) { private void testGetChunkGeneric(Object[] values, Object[] newValues, int chunkSize, RowSet rowSet) { final ObjectArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -265,20 +266,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, Object[] private void testFillChunkGeneric(Object[] values, Object[] newValues, int chunkSize, RowSet rowSet) { final ObjectArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - 
UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -527,14 +528,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final ObjectArraySource src = new ObjectArraySource<>(String.class); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in ObjectSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestShortArraySource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestShortArraySource.java index 9e412c41f2e..14b1283498b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestShortArraySource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/TestShortArraySource.java @@ -8,6 +8,7 @@ */ package io.deephaven.engine.table.impl.sources; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.ChunkSink; import io.deephaven.engine.table.ChunkSource; @@ -15,13 +16,13 @@ import io.deephaven.engine.table.impl.TestSourceSink; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ColumnSource; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.select.FormulaColumn; import io.deephaven.chunk.*; import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeyRanges; import 
io.deephaven.chunk.attributes.Values; import io.deephaven.base.testing.Shuffle; import io.deephaven.engine.rowset.chunkattributes.RowKeys; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.junit.Rule; @@ -59,20 +60,20 @@ private void updateFromArray(ShortArraySource dest, short[] values) { private void testGetChunkGeneric(short[] values, short[] newValues, int chunkSize, RowSet rowSet) { final ShortArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValues(chunkSize, newValues, rowSet, source); validatePrevValues(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -266,20 +267,20 @@ private void testParameterChunkAndIndex(Random random, int sourceSize, short[] v private void testFillChunkGeneric(short[] values, short[] newValues, int chunkSize, RowSet rowSet) { final ShortArraySource source; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { source = forArray(values); validateValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } - 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try { updateFromArray(source, newValues); validateValuesWithFill(chunkSize, newValues, rowSet, source); validatePrevValuesWithFill(chunkSize, values, rowSet, source); } finally { - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } @@ -528,14 +529,14 @@ public void confirmAliasingForbidden() { public void testFillEmptyChunkWithPrev() { final ShortArraySource src = new ShortArraySource(); src.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); try (final RowSet keys = RowSetFactory.empty(); final WritableShortChunk chunk = WritableShortChunk.makeWritableChunk(0)) { // Fill from an empty chunk src.fillFromChunkByKeys(keys, chunk); } // NullPointerException in ShortSparseArraySource.commitUpdates() - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/chunkcolumnsource/TestChunkColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/chunkcolumnsource/TestChunkColumnSource.java index d338e3577fd..074ff3cecf3 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/chunkcolumnsource/TestChunkColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/chunkcolumnsource/TestChunkColumnSource.java @@ -6,8 +6,8 @@ import gnu.trove.list.array.TLongArrayList; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.table.ChunkSource; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.table.impl.sources.LongAsInstantColumnSource; 
-import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.time.DateTimeUtils; import io.deephaven.chunk.util.hashing.IntChunkEquals; import io.deephaven.engine.table.impl.sources.ByteAsBooleanColumnSource; @@ -18,133 +18,126 @@ import io.deephaven.util.QueryConstants; import junit.framework.TestCase; import org.apache.commons.lang3.mutable.MutableInt; -import org.junit.After; -import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.time.Instant; public class TestChunkColumnSource { - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } + @Rule + public final EngineCleanup framework = new EngineCleanup(); @Test public void testSimple() { - final WritableCharChunk charChunk1 = WritableCharChunk.makeWritableChunk(1024); - final WritableCharChunk charChunk2 = WritableCharChunk.makeWritableChunk(1024); - for (int ii = 0; ii < 1024; ++ii) { - charChunk1.set(ii, (char) (1024 + ii)); - charChunk2.set(ii, (char) (2048 + ii)); - } - - final CharChunkColumnSource columnSource = new CharChunkColumnSource(); - columnSource.addChunk(charChunk1); - columnSource.addChunk(charChunk2); - - TestCase.assertEquals(QueryConstants.NULL_CHAR, columnSource.getChar(-1)); - TestCase.assertEquals(QueryConstants.NULL_CHAR, columnSource.getChar(2048)); - - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(charChunk1.get(ii), columnSource.getChar(ii)); - TestCase.assertEquals(charChunk2.get(ii), columnSource.getChar(ii + 1024)); - } - - final WritableCharChunk destChunk = WritableCharChunk.makeWritableChunk(2048); - try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { - columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(0, 2047)); - 
TestCase.assertEquals(2048, destChunk.size()); + try (final WritableCharChunk charChunk1 = WritableCharChunk.makeWritableChunk(1024); + final WritableCharChunk charChunk2 = WritableCharChunk.makeWritableChunk(1024); + final WritableCharChunk destChunk = WritableCharChunk.makeWritableChunk(2048);) { for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(charChunk1.get(ii), destChunk.get(ii)); - TestCase.assertEquals(charChunk2.get(ii), destChunk.get(ii + 1024)); - } - } - - try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { - columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(2047, 2047)); - TestCase.assertEquals(1, destChunk.size()); - TestCase.assertEquals(charChunk2.get(1023), destChunk.get(0)); - } - - try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { - columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(10, 20)); - TestCase.assertEquals(11, destChunk.size()); - for (int ii = 0; ii <= 10; ++ii) { - TestCase.assertEquals(charChunk1.get(ii + 10), destChunk.get(ii)); - } - } - - try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { - columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(1020, 1030)); - TestCase.assertEquals(11, destChunk.size()); - for (int ii = 0; ii <= 3; ++ii) { - TestCase.assertEquals(charChunk1.get(ii + 1020), destChunk.get(ii)); - } - for (int ii = 4; ii <= 10; ++ii) { - TestCase.assertEquals(charChunk2.get(ii - 4), destChunk.get(ii)); + charChunk1.set(ii, (char) (1024 + ii)); + charChunk2.set(ii, (char) (2048 + ii)); } - } - try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { - final CharChunk values = - columnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 2047)).asCharChunk(); - TestCase.assertEquals(2048, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(charChunk1.get(ii), values.get(ii)); - 
TestCase.assertEquals(charChunk2.get(ii), values.get(ii + 1024)); - } - } + final CharChunkColumnSource columnSource = new CharChunkColumnSource(); + columnSource.addChunk(charChunk1); + columnSource.addChunk(charChunk2); - try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { - final CharChunk values = - columnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 1023)).asCharChunk(); - TestCase.assertEquals(1024, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(charChunk1.get(ii), values.get(ii)); - } - } + TestCase.assertEquals(QueryConstants.NULL_CHAR, columnSource.getChar(-1)); + TestCase.assertEquals(QueryConstants.NULL_CHAR, columnSource.getChar(2048)); - try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { - final CharChunk values = - columnSource.getChunk(getContext, RowSequenceFactory.forRange(1024, 2047)).asCharChunk(); - TestCase.assertEquals(1024, values.size()); for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(charChunk2.get(ii), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { - final CharChunk values = - columnSource.getChunk(getContext, RowSequenceFactory.forRange(2047, 2047)).asCharChunk(); - TestCase.assertEquals(1, values.size()); - TestCase.assertEquals(charChunk2.get(1023), values.get(0)); - } - - try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { - final CharChunk values = - columnSource.getChunk(getContext, RowSequenceFactory.forRange(10, 20)).asCharChunk(); - TestCase.assertEquals(11, values.size()); - for (int ii = 0; ii <= 10; ++ii) { - TestCase.assertEquals(charChunk1.get(ii + 10), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { - final CharChunk values = - columnSource.getChunk(getContext, RowSequenceFactory.forRange(1020, 1030)).asCharChunk(); - TestCase.assertEquals(11, 
values.size()); - for (int ii = 0; ii <= 3; ++ii) { - TestCase.assertEquals(charChunk1.get(ii + 1020), values.get(ii)); - } - for (int ii = 4; ii <= 10; ++ii) { - TestCase.assertEquals(charChunk2.get(ii - 4), values.get(ii)); + TestCase.assertEquals(charChunk1.get(ii), columnSource.getChar(ii)); + TestCase.assertEquals(charChunk2.get(ii), columnSource.getChar(ii + 1024)); + } + + try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { + columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(0, 2047)); + TestCase.assertEquals(2048, destChunk.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(charChunk1.get(ii), destChunk.get(ii)); + TestCase.assertEquals(charChunk2.get(ii), destChunk.get(ii + 1024)); + } + } + + try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { + columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(2047, 2047)); + TestCase.assertEquals(1, destChunk.size()); + TestCase.assertEquals(charChunk2.get(1023), destChunk.get(0)); + } + + try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { + columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(10, 20)); + TestCase.assertEquals(11, destChunk.size()); + for (int ii = 0; ii <= 10; ++ii) { + TestCase.assertEquals(charChunk1.get(ii + 10), destChunk.get(ii)); + } + } + + try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { + columnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(1020, 1030)); + TestCase.assertEquals(11, destChunk.size()); + for (int ii = 0; ii <= 3; ++ii) { + TestCase.assertEquals(charChunk1.get(ii + 1020), destChunk.get(ii)); + } + for (int ii = 4; ii <= 10; ++ii) { + TestCase.assertEquals(charChunk2.get(ii - 4), destChunk.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { + final CharChunk values = + 
columnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 2047)).asCharChunk(); + TestCase.assertEquals(2048, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(charChunk1.get(ii), values.get(ii)); + TestCase.assertEquals(charChunk2.get(ii), values.get(ii + 1024)); + } + } + + try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { + final CharChunk values = + columnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 1023)).asCharChunk(); + TestCase.assertEquals(1024, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(charChunk1.get(ii), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { + final CharChunk values = + columnSource.getChunk(getContext, RowSequenceFactory.forRange(1024, 2047)).asCharChunk(); + TestCase.assertEquals(1024, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(charChunk2.get(ii), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { + final CharChunk values = + columnSource.getChunk(getContext, RowSequenceFactory.forRange(2047, 2047)).asCharChunk(); + TestCase.assertEquals(1, values.size()); + TestCase.assertEquals(charChunk2.get(1023), values.get(0)); + } + + try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { + final CharChunk values = + columnSource.getChunk(getContext, RowSequenceFactory.forRange(10, 20)).asCharChunk(); + TestCase.assertEquals(11, values.size()); + for (int ii = 0; ii <= 10; ++ii) { + TestCase.assertEquals(charChunk1.get(ii + 10), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { + final CharChunk values = + columnSource.getChunk(getContext, RowSequenceFactory.forRange(1020, 1030)).asCharChunk(); + TestCase.assertEquals(11, values.size()); + for (int ii = 0; ii <= 3; ++ii) { + 
TestCase.assertEquals(charChunk1.get(ii + 1020), values.get(ii)); + } + for (int ii = 4; ii <= 10; ++ii) { + TestCase.assertEquals(charChunk2.get(ii - 4), values.get(ii)); + } } } } @@ -239,192 +232,197 @@ public void testShared() { checkDoubles(doubleChunk1, doubleChunk2, doubleColumnSource); checkLongs(longChunk1, longChunk2, longColumnSource); + + longColumnSource.clear(); + doubleColumnSource.clear(); } private void checkDoubles(WritableDoubleChunk doubleChunk1, WritableDoubleChunk doubleChunk2, ChunkColumnSource doubleColumnSource) { - final WritableDoubleChunk destChunk = WritableDoubleChunk.makeWritableChunk(2048); - try (final ChunkSource.FillContext doubleFillContext = doubleColumnSource.makeFillContext(2048)) { - doubleColumnSource.fillChunk(doubleFillContext, destChunk, RowSequenceFactory.forRange(0, 2047)); - TestCase.assertEquals(2048, destChunk.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(doubleChunk1.get(ii), destChunk.get(ii)); - TestCase.assertEquals(doubleChunk2.get(ii), destChunk.get(ii + 1024)); - } - } - - try (final ChunkSource.FillContext fillContext = doubleColumnSource.makeFillContext(2048)) { - doubleColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(2047, 2047)); - TestCase.assertEquals(1, destChunk.size()); - TestCase.assertEquals(doubleChunk2.get(1023), destChunk.get(0)); - } - - try (final ChunkSource.FillContext fillContext = doubleColumnSource.makeFillContext(2048)) { - doubleColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(10, 20)); - TestCase.assertEquals(11, destChunk.size()); - for (int ii = 0; ii <= 10; ++ii) { - TestCase.assertEquals(doubleChunk1.get(ii + 10), destChunk.get(ii)); - } - } - - try (final ChunkSource.FillContext fillContext = doubleColumnSource.makeFillContext(2048)) { - doubleColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(1020, 1030)); - TestCase.assertEquals(11, destChunk.size()); - for (int ii = 0; ii <= 3; 
++ii) { - TestCase.assertEquals(doubleChunk1.get(ii + 1020), destChunk.get(ii)); - } - for (int ii = 4; ii <= 10; ++ii) { - TestCase.assertEquals(doubleChunk2.get(ii - 4), destChunk.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = - doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 2047)).asDoubleChunk(); - TestCase.assertEquals(2048, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(doubleChunk1.get(ii), values.get(ii)); - TestCase.assertEquals(doubleChunk2.get(ii), values.get(ii + 1024)); - } - } - - try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = - doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 1023)).asDoubleChunk(); - TestCase.assertEquals(1024, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(doubleChunk1.get(ii), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = - doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(1024, 2047)).asDoubleChunk(); - TestCase.assertEquals(1024, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(doubleChunk2.get(ii), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = - doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(2047, 2047)).asDoubleChunk(); - TestCase.assertEquals(1, values.size()); - TestCase.assertEquals(doubleChunk2.get(1023), values.get(0)); - } - - try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = - doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(10, 20)).asDoubleChunk(); - TestCase.assertEquals(11, values.size()); - 
for (int ii = 0; ii <= 10; ++ii) { - TestCase.assertEquals(doubleChunk1.get(ii + 10), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = - doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(1020, 1030)).asDoubleChunk(); - TestCase.assertEquals(11, values.size()); - for (int ii = 0; ii <= 3; ++ii) { - TestCase.assertEquals(doubleChunk1.get(ii + 1020), values.get(ii)); - } - for (int ii = 4; ii <= 10; ++ii) { - TestCase.assertEquals(doubleChunk2.get(ii - 4), values.get(ii)); + try (final WritableDoubleChunk destChunk = WritableDoubleChunk.makeWritableChunk(2048)) { + try (final ChunkSource.FillContext doubleFillContext = doubleColumnSource.makeFillContext(2048)) { + doubleColumnSource.fillChunk(doubleFillContext, destChunk, RowSequenceFactory.forRange(0, 2047)); + TestCase.assertEquals(2048, destChunk.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(doubleChunk1.get(ii), destChunk.get(ii)); + TestCase.assertEquals(doubleChunk2.get(ii), destChunk.get(ii + 1024)); + } + } + + try (final ChunkSource.FillContext fillContext = doubleColumnSource.makeFillContext(2048)) { + doubleColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(2047, 2047)); + TestCase.assertEquals(1, destChunk.size()); + TestCase.assertEquals(doubleChunk2.get(1023), destChunk.get(0)); + } + + try (final ChunkSource.FillContext fillContext = doubleColumnSource.makeFillContext(2048)) { + doubleColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(10, 20)); + TestCase.assertEquals(11, destChunk.size()); + for (int ii = 0; ii <= 10; ++ii) { + TestCase.assertEquals(doubleChunk1.get(ii + 10), destChunk.get(ii)); + } + } + + try (final ChunkSource.FillContext fillContext = doubleColumnSource.makeFillContext(2048)) { + doubleColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(1020, 1030)); + 
TestCase.assertEquals(11, destChunk.size()); + for (int ii = 0; ii <= 3; ++ii) { + TestCase.assertEquals(doubleChunk1.get(ii + 1020), destChunk.get(ii)); + } + for (int ii = 4; ii <= 10; ++ii) { + TestCase.assertEquals(doubleChunk2.get(ii - 4), destChunk.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 2047)).asDoubleChunk(); + TestCase.assertEquals(2048, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(doubleChunk1.get(ii), values.get(ii)); + TestCase.assertEquals(doubleChunk2.get(ii), values.get(ii + 1024)); + } + } + + try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 1023)).asDoubleChunk(); + TestCase.assertEquals(1024, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(doubleChunk1.get(ii), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { + final DoubleChunk values = doubleColumnSource + .getChunk(getContext, RowSequenceFactory.forRange(1024, 2047)).asDoubleChunk(); + TestCase.assertEquals(1024, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(doubleChunk2.get(ii), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { + final DoubleChunk values = doubleColumnSource + .getChunk(getContext, RowSequenceFactory.forRange(2047, 2047)).asDoubleChunk(); + TestCase.assertEquals(1, values.size()); + TestCase.assertEquals(doubleChunk2.get(1023), values.get(0)); + } + + try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, 
RowSequenceFactory.forRange(10, 20)).asDoubleChunk(); + TestCase.assertEquals(11, values.size()); + for (int ii = 0; ii <= 10; ++ii) { + TestCase.assertEquals(doubleChunk1.get(ii + 10), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { + final DoubleChunk values = doubleColumnSource + .getChunk(getContext, RowSequenceFactory.forRange(1020, 1030)).asDoubleChunk(); + TestCase.assertEquals(11, values.size()); + for (int ii = 0; ii <= 3; ++ii) { + TestCase.assertEquals(doubleChunk1.get(ii + 1020), values.get(ii)); + } + for (int ii = 4; ii <= 10; ++ii) { + TestCase.assertEquals(doubleChunk2.get(ii - 4), values.get(ii)); + } } } } private void checkLongs(WritableLongChunk longChunk1, WritableLongChunk longChunk2, ChunkColumnSource longColumnSource) { - final WritableLongChunk destChunk = WritableLongChunk.makeWritableChunk(2048); - try (final ChunkSource.FillContext longFillContext = longColumnSource.makeFillContext(2048)) { - longColumnSource.fillChunk(longFillContext, destChunk, RowSequenceFactory.forRange(0, 2047)); - TestCase.assertEquals(2048, destChunk.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(longChunk1.get(ii), destChunk.get(ii)); - TestCase.assertEquals(longChunk2.get(ii), destChunk.get(ii + 1024)); - } - } - - try (final ChunkSource.FillContext fillContext = longColumnSource.makeFillContext(2048)) { - longColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(2047, 2047)); - TestCase.assertEquals(1, destChunk.size()); - TestCase.assertEquals(longChunk2.get(1023), destChunk.get(0)); - } - - try (final ChunkSource.FillContext fillContext = longColumnSource.makeFillContext(2048)) { - longColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(10, 20)); - TestCase.assertEquals(11, destChunk.size()); - for (int ii = 0; ii <= 10; ++ii) { - TestCase.assertEquals(longChunk1.get(ii + 10), destChunk.get(ii)); - } - } - - try (final 
ChunkSource.FillContext fillContext = longColumnSource.makeFillContext(2048)) { - longColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(1020, 1030)); - TestCase.assertEquals(11, destChunk.size()); - for (int ii = 0; ii <= 3; ++ii) { - TestCase.assertEquals(longChunk1.get(ii + 1020), destChunk.get(ii)); - } - for (int ii = 4; ii <= 10; ++ii) { - TestCase.assertEquals(longChunk2.get(ii - 4), destChunk.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = - longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 2047)).asLongChunk(); - TestCase.assertEquals(2048, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(longChunk1.get(ii), values.get(ii)); - TestCase.assertEquals(longChunk2.get(ii), values.get(ii + 1024)); - } - } - - try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = - longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 1023)).asLongChunk(); - TestCase.assertEquals(1024, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(longChunk1.get(ii), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = - longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(1024, 2047)).asLongChunk(); - TestCase.assertEquals(1024, values.size()); - for (int ii = 0; ii < 1024; ++ii) { - TestCase.assertEquals(longChunk2.get(ii), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = - longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(2047, 2047)).asLongChunk(); - TestCase.assertEquals(1, values.size()); - TestCase.assertEquals(longChunk2.get(1023), values.get(0)); - } - - try (final ChunkSource.GetContext getContext = 
longColumnSource.makeGetContext(2048)) { - final LongChunk values = - longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(10, 20)).asLongChunk(); - TestCase.assertEquals(11, values.size()); - for (int ii = 0; ii <= 10; ++ii) { - TestCase.assertEquals(longChunk1.get(ii + 10), values.get(ii)); - } - } - - try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = - longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(1020, 1030)).asLongChunk(); - TestCase.assertEquals(11, values.size()); - for (int ii = 0; ii <= 3; ++ii) { - TestCase.assertEquals(longChunk1.get(ii + 1020), values.get(ii)); - } - for (int ii = 4; ii <= 10; ++ii) { - TestCase.assertEquals(longChunk2.get(ii - 4), values.get(ii)); + try (final WritableLongChunk destChunk = WritableLongChunk.makeWritableChunk(2048)) { + try (final ChunkSource.FillContext longFillContext = longColumnSource.makeFillContext(2048)) { + longColumnSource.fillChunk(longFillContext, destChunk, RowSequenceFactory.forRange(0, 2047)); + TestCase.assertEquals(2048, destChunk.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(longChunk1.get(ii), destChunk.get(ii)); + TestCase.assertEquals(longChunk2.get(ii), destChunk.get(ii + 1024)); + } + } + + try (final ChunkSource.FillContext fillContext = longColumnSource.makeFillContext(2048)) { + longColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(2047, 2047)); + TestCase.assertEquals(1, destChunk.size()); + TestCase.assertEquals(longChunk2.get(1023), destChunk.get(0)); + } + + try (final ChunkSource.FillContext fillContext = longColumnSource.makeFillContext(2048)) { + longColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(10, 20)); + TestCase.assertEquals(11, destChunk.size()); + for (int ii = 0; ii <= 10; ++ii) { + TestCase.assertEquals(longChunk1.get(ii + 10), destChunk.get(ii)); + } + } + + try (final ChunkSource.FillContext fillContext = 
longColumnSource.makeFillContext(2048)) { + longColumnSource.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(1020, 1030)); + TestCase.assertEquals(11, destChunk.size()); + for (int ii = 0; ii <= 3; ++ii) { + TestCase.assertEquals(longChunk1.get(ii + 1020), destChunk.get(ii)); + } + for (int ii = 4; ii <= 10; ++ii) { + TestCase.assertEquals(longChunk2.get(ii - 4), destChunk.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { + final LongChunk values = + longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 2047)).asLongChunk(); + TestCase.assertEquals(2048, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(longChunk1.get(ii), values.get(ii)); + TestCase.assertEquals(longChunk2.get(ii), values.get(ii + 1024)); + } + } + + try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { + final LongChunk values = + longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(0, 1023)).asLongChunk(); + TestCase.assertEquals(1024, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(longChunk1.get(ii), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { + final LongChunk values = + longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(1024, 2047)).asLongChunk(); + TestCase.assertEquals(1024, values.size()); + for (int ii = 0; ii < 1024; ++ii) { + TestCase.assertEquals(longChunk2.get(ii), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { + final LongChunk values = + longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(2047, 2047)).asLongChunk(); + TestCase.assertEquals(1, values.size()); + TestCase.assertEquals(longChunk2.get(1023), values.get(0)); + } + + try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { + final 
LongChunk values = + longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(10, 20)).asLongChunk(); + TestCase.assertEquals(11, values.size()); + for (int ii = 0; ii <= 10; ++ii) { + TestCase.assertEquals(longChunk1.get(ii + 10), values.get(ii)); + } + } + + try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { + final LongChunk values = + longColumnSource.getChunk(getContext, RowSequenceFactory.forRange(1020, 1030)).asLongChunk(); + TestCase.assertEquals(11, values.size()); + for (int ii = 0; ii <= 3; ++ii) { + TestCase.assertEquals(longChunk1.get(ii + 1020), values.get(ii)); + } + for (int ii = 4; ii <= 10; ++ii) { + TestCase.assertEquals(longChunk2.get(ii - 4), values.get(ii)); + } } } } @@ -460,8 +458,8 @@ public void testBooleanWrapper() { TestCase.assertEquals(makeExpectBoolean(ii), wrapped.get(ii)); } - final WritableObjectChunk destChunk = WritableObjectChunk.makeWritableChunk(2048); - try (final ChunkSource.FillContext fillContext = wrapped.makeFillContext(32)) { + try (final WritableObjectChunk destChunk = WritableObjectChunk.makeWritableChunk(2048); + final ChunkSource.FillContext fillContext = wrapped.makeFillContext(32)) { wrapped.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(0, 31)); TestCase.assertEquals(32, destChunk.size()); for (int ii = 0; ii < 32; ++ii) { @@ -477,6 +475,8 @@ public void testBooleanWrapper() { TestCase.assertEquals(makeExpectBoolean(ii), values.get(ii - 1)); } } + + columnSource.clear(); } private static Instant makeExpectedInstant(int idx) { @@ -502,8 +502,8 @@ public void testInstantWrapper() { TestCase.assertEquals(makeExpectedInstant(ii), wrapped.get(ii)); } - final WritableObjectChunk destChunk = WritableObjectChunk.makeWritableChunk(2048); - try (final ChunkSource.FillContext fillContext = wrapped.makeFillContext(32)) { + try (final WritableObjectChunk destChunk = WritableObjectChunk.makeWritableChunk(2048); + final ChunkSource.FillContext fillContext = 
wrapped.makeFillContext(32)) { wrapped.fillChunk(fillContext, destChunk, RowSequenceFactory.forRange(0, 31)); TestCase.assertEquals(32, destChunk.size()); for (int ii = 0; ii < 32; ++ii) { @@ -519,6 +519,8 @@ public void testInstantWrapper() { TestCase.assertEquals(makeExpectedInstant(ii), values.get(ii - 1)); } } + + columnSource.clear(); } @Test @@ -558,5 +560,7 @@ public void testClear() { final IntChunk actual = intColumnSource.getChunk(context, 0, 63).asIntChunk(); TestCase.assertTrue(IntChunkEquals.equalReduce(actual, intChunk2)); } + + intColumnSource.clear(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestBooleanDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestBooleanDeltaAwareColumnSource.java index 63d580f1625..375fa1b3a5b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestBooleanDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestBooleanDeltaAwareColumnSource.java @@ -12,7 +12,7 @@ import io.deephaven.util.BooleanUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.BooleanChunk; @@ -21,8 +21,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -33,15 +37,21 @@ import static junit.framework.TestCase.*; public class TestBooleanDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); 
+ + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(boolean.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -51,8 +61,7 @@ public void simple1() { final long key1 = 6; final byte expected1 = ArrayGenerator.randomBooleans(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(boolean.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -60,7 +69,7 @@ public void simple1() { final byte actual1 = source.getByte(key1); assertEquals(BooleanUtils.NULL_BOOLEAN_AS_BYTE, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -72,15 +81,14 @@ public void simple2() { final byte expected0_0 = values[0]; final byte expected0_1 = values[1]; final byte expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(boolean.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); 
source.set(key1, expected1); @@ -94,7 +102,7 @@ public void simple2() { assertEquals(BooleanUtils.NULL_BOOLEAN_AS_BYTE, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -117,8 +125,7 @@ public void overlapping() { final byte[] valuesPhase2 = ArrayGenerator.randomBooleans(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(boolean.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final byte value = valuesPhase1[(int)ii]; @@ -136,10 +143,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final byte value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -153,7 +160,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestByteDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestByteDeltaAwareColumnSource.java index c41e8c1b95d..62ddc3fb657 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestByteDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestByteDeltaAwareColumnSource.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.ByteChunk; @@ -17,8 +17,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -29,15 +33,21 @@ import 
static junit.framework.TestCase.*; public class TestByteDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(byte.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -47,8 +57,7 @@ public void simple1() { final long key1 = 6; final byte expected1 = ArrayGenerator.randomBytes(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(byte.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -56,7 +65,7 @@ public void simple1() { final byte actual1 = source.getByte(key1); assertEquals(NULL_BYTE, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -68,15 +77,14 @@ public void simple2() { final byte expected0_0 = values[0]; final byte expected0_1 = values[1]; final byte expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(byte.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -90,7 +98,7 @@ public void simple2() { assertEquals(NULL_BYTE, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -113,8 +121,7 @@ public void overlapping() { final byte[] valuesPhase2 = ArrayGenerator.randomBytes(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(byte.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final byte value = valuesPhase1[(int)ii]; @@ -132,10 +139,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final byte value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -149,7 +156,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestCharacterDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestCharacterDeltaAwareColumnSource.java index 55e14143364..2445c289097 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestCharacterDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestCharacterDeltaAwareColumnSource.java @@ -3,7 +3,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.CharChunk; @@ -12,8 +12,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -24,15 
+28,21 @@ import static junit.framework.TestCase.*; public class TestCharacterDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(char.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -42,8 +52,7 @@ public void simple1() { final long key1 = 6; final char expected1 = ArrayGenerator.randomChars(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(char.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -51,7 +60,7 @@ public void simple1() { final char actual1 = source.getChar(key1); assertEquals(NULL_CHAR, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -63,15 +72,14 @@ public void simple2() { final char expected0_0 = values[0]; final char expected0_1 = values[1]; final char expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(char.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -85,7 +93,7 @@ public void simple2() { assertEquals(NULL_CHAR, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -108,8 +116,7 @@ public void overlapping() { final char[] valuesPhase2 = ArrayGenerator.randomChars(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(char.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final char value = valuesPhase1[(int)ii]; @@ -127,10 +134,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final char value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -144,7 +151,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestDoubleDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestDoubleDeltaAwareColumnSource.java index aa64949b149..44058319612 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestDoubleDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestDoubleDeltaAwareColumnSource.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.DoubleChunk; @@ -17,8 +17,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -29,15 +33,21 @@ 
import static junit.framework.TestCase.*; public class TestDoubleDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(double.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -47,8 +57,7 @@ public void simple1() { final long key1 = 6; final double expected1 = ArrayGenerator.randomDoubles(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(double.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -56,7 +65,7 @@ public void simple1() { final double actual1 = source.getDouble(key1); assertEquals(NULL_DOUBLE, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -68,15 +77,14 @@ public void simple2() { final double expected0_0 = values[0]; final double expected0_1 = values[1]; final double expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(double.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -90,7 +98,7 @@ public void simple2() { assertEquals(NULL_DOUBLE, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -113,8 +121,7 @@ public void overlapping() { final double[] valuesPhase2 = ArrayGenerator.randomDoubles(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(double.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final double value = valuesPhase1[(int)ii]; @@ -132,10 +139,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final double value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -149,7 +156,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestFloatDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestFloatDeltaAwareColumnSource.java index cf414eee55e..46acb7240a5 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestFloatDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestFloatDeltaAwareColumnSource.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.FloatChunk; @@ -17,8 +17,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -29,15 +33,21 @@ 
import static junit.framework.TestCase.*; public class TestFloatDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(float.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -47,8 +57,7 @@ public void simple1() { final long key1 = 6; final float expected1 = ArrayGenerator.randomFloats(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(float.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -56,7 +65,7 @@ public void simple1() { final float actual1 = source.getFloat(key1); assertEquals(NULL_FLOAT, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -68,15 +77,14 @@ public void simple2() { final float expected0_0 = values[0]; final float expected0_1 = values[1]; final float expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(float.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -90,7 +98,7 @@ public void simple2() { assertEquals(NULL_FLOAT, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -113,8 +121,7 @@ public void overlapping() { final float[] valuesPhase2 = ArrayGenerator.randomFloats(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(float.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final float value = valuesPhase1[(int)ii]; @@ -132,10 +139,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final float value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -149,7 +156,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestIntegerDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestIntegerDeltaAwareColumnSource.java index bbc8f6a51cc..049e1a6b382 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestIntegerDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestIntegerDeltaAwareColumnSource.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.IntChunk; @@ -17,8 +17,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -29,15 +33,21 
@@ import static junit.framework.TestCase.*; public class TestIntegerDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(int.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -47,8 +57,7 @@ public void simple1() { final long key1 = 6; final int expected1 = ArrayGenerator.randomInts(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(int.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -56,7 +65,7 @@ public void simple1() { final int actual1 = source.getInt(key1); assertEquals(NULL_INT, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -68,15 +77,14 @@ public void simple2() { final int expected0_0 = values[0]; final int expected0_1 = values[1]; final int expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(int.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -90,7 +98,7 @@ public void simple2() { assertEquals(NULL_INT, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -113,8 +121,7 @@ public void overlapping() { final int[] valuesPhase2 = ArrayGenerator.randomInts(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(int.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final int value = valuesPhase1[(int)ii]; @@ -132,10 +139,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final int value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -149,7 +156,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestLongDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestLongDeltaAwareColumnSource.java index 40ff373ae8e..e211e1b61c3 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestLongDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestLongDeltaAwareColumnSource.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.LongChunk; @@ -17,8 +17,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -29,15 +33,21 @@ import 
static junit.framework.TestCase.*; public class TestLongDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(long.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -47,8 +57,7 @@ public void simple1() { final long key1 = 6; final long expected1 = ArrayGenerator.randomLongs(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(long.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -56,7 +65,7 @@ public void simple1() { final long actual1 = source.getLong(key1); assertEquals(NULL_LONG, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -68,15 +77,14 @@ public void simple2() { final long expected0_0 = values[0]; final long expected0_1 = values[1]; final long expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(long.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + 
ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -90,7 +98,7 @@ public void simple2() { assertEquals(NULL_LONG, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -113,8 +121,7 @@ public void overlapping() { final long[] valuesPhase2 = ArrayGenerator.randomLongs(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(long.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final long value = valuesPhase1[(int)ii]; @@ -132,10 +139,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final long value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -149,7 +156,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestObjectDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestObjectDeltaAwareColumnSource.java index e722ffe104b..f4f162f2f3d 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestObjectDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestObjectDeltaAwareColumnSource.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.ObjectChunk; @@ -17,8 +17,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -28,15 +32,21 @@ 
import static junit.framework.TestCase.*; public class TestObjectDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(Object.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -46,8 +56,7 @@ public void simple1() { final long key1 = 6; final Object expected1 = ArrayGenerator.randomObjects(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(Object.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -55,7 +64,7 @@ public void simple1() { final Object actual1 = source.get(key1); assertEquals(null, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -67,15 +76,14 @@ public void simple2() { final Object expected0_0 = values[0]; final Object expected0_1 = values[1]; final Object expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(Object.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -89,7 +97,7 @@ public void simple2() { assertEquals(null, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -112,8 +120,7 @@ public void overlapping() { final Object[] valuesPhase2 = ArrayGenerator.randomObjects(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(Object.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final Object value = valuesPhase1[(int)ii]; @@ -131,10 +138,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final Object value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -148,7 +155,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestShortDeltaAwareColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestShortDeltaAwareColumnSource.java index ef25e093f95..e8947a9aa19 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestShortDeltaAwareColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/deltaaware/TestShortDeltaAwareColumnSource.java @@ -8,7 +8,7 @@ */ package io.deephaven.engine.table.impl.sources.deltaaware; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.chunk.ArrayGenerator; import io.deephaven.engine.table.ChunkSource; import io.deephaven.chunk.ShortChunk; @@ -17,8 +17,12 @@ import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.util.SafeCloseable; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.HashMap; @@ -29,15 +33,21 @@ 
import static junit.framework.TestCase.*; public class TestShortDeltaAwareColumnSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + + DeltaAwareColumnSource source; + @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + public void setUp() { + source = new DeltaAwareColumnSource<>(short.class); } @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + public void tearDown() { + source.releaseCachedResources(); + source = null; } @Test @@ -47,8 +57,7 @@ public void simple1() { final long key1 = 6; final short expected1 = ArrayGenerator.randomShorts(rng, 1)[0]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(short.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key1, expected1); @@ -56,7 +65,7 @@ public void simple1() { final short actual1 = source.getShort(key1); assertEquals(NULL_SHORT, actual0); assertEquals(expected1, actual1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } @Test @@ -68,15 +77,14 @@ public void simple2() { final short expected0_0 = values[0]; final short expected0_1 = values[1]; final short expected1 = values[2]; - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(short.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(10); source.set(key0, expected0_0); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); source.startTrackingPrevValues(); - 
UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.set(key0, expected0_1); source.set(key1, expected1); @@ -90,7 +98,7 @@ public void simple2() { assertEquals(NULL_SHORT, actual1_0); assertEquals(expected1, actual1_1); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } /** @@ -113,8 +121,7 @@ public void overlapping() { final short[] valuesPhase2 = ArrayGenerator.randomShorts(rng, length); final HashMap expectedPrev = new HashMap<>(); final HashMap expectedCurrent = new HashMap<>(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - final DeltaAwareColumnSource source = new DeltaAwareColumnSource<>(short.class); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); source.ensureCapacity(length); for (long ii = 0; ii < length; ++ii) { final short value = valuesPhase1[(int)ii]; @@ -132,10 +139,10 @@ public void overlapping() { // Check some subranges using three ranges. final long[] threeRanges = {10, 30, 45, 55, 70, 90}; checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); // Now start the second cycle so we have different current and prev values. 
- UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); for (long ii = 20; ii < 40; ++ii) { final short value = valuesPhase2[(int)ii]; source.set(ii, value); @@ -149,7 +156,7 @@ public void overlapping() { checkUsingGet(source, expectedCurrent, expectedPrev, 0, length); checkUsingChunk(source, expectedCurrent, expectedPrev, singleRange); checkUsingChunk(source, expectedCurrent, expectedPrev, threeRanges); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } private static void checkUsingGet(DeltaAwareColumnSource source, Map expectedCurrent, diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ring/RingTableToolsTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ring/RingTableToolsTest.java index aadfbd34332..bfe738a1419 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ring/RingTableToolsTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/sources/ring/RingTableToolsTest.java @@ -3,14 +3,15 @@ */ package io.deephaven.engine.table.impl.sources.ring; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.BlinkTableTools; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.table.impl.util.ColumnHolder; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.time.DateTimeUtils; @@ -96,8 +97,9 @@ private static void cycleTest(int capacity, int appendSize, int times) { final 
Table tail = BlinkTableTools.blinkToAppendOnly(streamHelper.blinkTable).tail(capacity); final Table ring = RingTableTools.of(streamHelper.blinkTable, capacity, true); checkEquals(tail, ring); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int i = 0; i < times; ++i) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { streamHelper.addAndNotify(appendSize, holders); checkEquals(tail, ring); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/SsaTestHelpers.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/SsaTestHelpers.java index d6a49a4c788..7b76776bbf6 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/SsaTestHelpers.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/SsaTestHelpers.java @@ -3,11 +3,11 @@ */ package io.deephaven.engine.table.impl.ssa; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.generator.SortedDoubleGenerator; import io.deephaven.engine.testutil.generator.SortedIntGenerator; import io.deephaven.engine.testutil.generator.SortedLongGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.table.impl.QueryTable; import org.jetbrains.annotations.NotNull; @@ -82,8 +82,8 @@ public static SortedIntGenerator getGeneratorForObject() { public static Table prepareTestTableForObject(QueryTable table) { // an update might be faster, but updateView ensures we break when object equality is not the same as == - return UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> table.updateView("Value=String.format(`%06d`, Value)")); + return ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> table.updateView("Value=String.format(`%06d`, Value)")); } public static final class TestDescriptor { diff 
--git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestByteSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestByteSegmentedSortedArray.java index 81cf390e242..2847b2c879f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestByteSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestByteSegmentedSortedArray.java @@ -9,11 +9,12 @@ package io.deephaven.engine.table.impl.ssa; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -175,8 +176,9 @@ public void onUpdate(TableUpdate upstream) { }; asByte.addUpdateListener(asByteListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asByte.intSize()); @@ -218,8 +220,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asByte.addUpdateListener(asByteListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestCharSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestCharSegmentedSortedArray.java index 758974eb684..7a525e0bb47 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestCharSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestCharSegmentedSortedArray.java @@ -4,11 +4,12 @@ package io.deephaven.engine.table.impl.ssa; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -170,8 +171,9 @@ public void onUpdate(TableUpdate upstream) { }; asCharacter.addUpdateListener(asCharacterListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asCharacter.intSize()); @@ -213,8 +215,9 @@ public void 
onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asCharacter.addUpdateListener(asCharacterListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestDoubleSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestDoubleSegmentedSortedArray.java index 282e9dc019e..c287db715e1 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestDoubleSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestDoubleSegmentedSortedArray.java @@ -9,11 +9,12 @@ package io.deephaven.engine.table.impl.ssa; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -175,8 +176,9 @@ public void onUpdate(TableUpdate upstream) { }; asDouble.addUpdateListener(asDoubleListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + 
updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asDouble.intSize()); @@ -218,8 +220,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asDouble.addUpdateListener(asDoubleListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestFloatSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestFloatSegmentedSortedArray.java index 051284c9962..01c6559d57e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestFloatSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestFloatSegmentedSortedArray.java @@ -9,11 +9,12 @@ package io.deephaven.engine.table.impl.ssa; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -175,8 +176,9 
@@ public void onUpdate(TableUpdate upstream) { }; asFloat.addUpdateListener(asFloatListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asFloat.intSize()); @@ -218,8 +220,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asFloat.addUpdateListener(asFloatListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestIntSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestIntSegmentedSortedArray.java index 786629906fe..4ff482a17cd 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestIntSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestIntSegmentedSortedArray.java @@ -9,11 +9,12 @@ package io.deephaven.engine.table.impl.ssa; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import 
io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -175,8 +176,9 @@ public void onUpdate(TableUpdate upstream) { }; asInteger.addUpdateListener(asIntegerListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asInteger.intSize()); @@ -218,8 +220,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asInteger.addUpdateListener(asIntegerListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestLongSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestLongSegmentedSortedArray.java index 415e3adf93e..9cc3fb81a95 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestLongSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestLongSegmentedSortedArray.java @@ -9,11 +9,12 @@ package io.deephaven.engine.table.impl.ssa; import 
io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -175,8 +176,9 @@ public void onUpdate(TableUpdate upstream) { }; asLong.addUpdateListener(asLongListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asLong.intSize()); @@ -218,8 +220,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asLong.addUpdateListener(asLongListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestObjectSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestObjectSegmentedSortedArray.java index 6b57a666d20..5abbe15609a 
100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestObjectSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestObjectSegmentedSortedArray.java @@ -9,11 +9,12 @@ package io.deephaven.engine.table.impl.ssa; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -175,8 +176,9 @@ public void onUpdate(TableUpdate upstream) { }; asObject.addUpdateListener(asObjectListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asObject.intSize()); @@ -218,8 +220,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asObject.addUpdateListener(asObjectListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); 
table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestShortSegmentedSortedArray.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestShortSegmentedSortedArray.java index d8eba2ab2a3..75d3595ca9b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestShortSegmentedSortedArray.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssa/TestShortSegmentedSortedArray.java @@ -9,11 +9,12 @@ package io.deephaven.engine.table.impl.ssa; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.*; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.*; @@ -175,8 +176,9 @@ public void onUpdate(TableUpdate upstream) { }; asShort.addUpdateListener(asShortListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, desc.tableSize(), random, table, columnInfo)); try (final ColumnSource.GetContext getContext = valueSource.makeGetContext(asShort.intSize()); @@ -218,8 +220,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asShort.addUpdateListener(asShortListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestByteSegmentedSortedMultiset.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestByteSegmentedSortedMultiset.java index 4d5eeb89254..ffbf919954f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestByteSegmentedSortedMultiset.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestByteSegmentedSortedMultiset.java @@ -9,13 +9,14 @@ package io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.ByteComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -183,8 +184,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asByte.addUpdateListener(asByteListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = 
GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestCharSegmentedSortedMultiset.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestCharSegmentedSortedMultiset.java index 14a3a51df7e..43d137a1e79 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestCharSegmentedSortedMultiset.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestCharSegmentedSortedMultiset.java @@ -4,13 +4,14 @@ package io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.CharComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -178,8 +179,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asCharacter.addUpdateListener(asCharacterListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); 
assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestDoubleSegmentedSortedMultiset.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestDoubleSegmentedSortedMultiset.java index 3d130ecf5d7..7a0647d5e2f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestDoubleSegmentedSortedMultiset.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestDoubleSegmentedSortedMultiset.java @@ -9,13 +9,14 @@ package io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.DoubleComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -183,8 +184,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asDouble.addUpdateListener(asDoubleListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestFloatSegmentedSortedMultiset.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestFloatSegmentedSortedMultiset.java index ee426bffd8c..2588c8e6817 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestFloatSegmentedSortedMultiset.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestFloatSegmentedSortedMultiset.java @@ -9,13 +9,14 @@ package io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.FloatComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -183,8 +184,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asFloat.addUpdateListener(asFloatListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestIntSegmentedSortedMultiset.java 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestIntSegmentedSortedMultiset.java index c39c4e3a955..6da8de26c21 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestIntSegmentedSortedMultiset.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestIntSegmentedSortedMultiset.java @@ -9,13 +9,14 @@ package io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.IntComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -183,8 +184,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asInteger.addUpdateListener(asIntegerListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestLongSegmentedSortedMultiset.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestLongSegmentedSortedMultiset.java index 486243dfa36..a7d039379ff 100644 --- 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestLongSegmentedSortedMultiset.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestLongSegmentedSortedMultiset.java @@ -9,13 +9,14 @@ package io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.LongComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -183,8 +184,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asLong.addUpdateListener(asLongListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestObjectSegmentedSortedMultiset.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestObjectSegmentedSortedMultiset.java index 49699f0ed57..72246799b4e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestObjectSegmentedSortedMultiset.java +++ 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestObjectSegmentedSortedMultiset.java @@ -9,13 +9,14 @@ package io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.ObjectComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -171,8 +172,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asObject.addUpdateListener(asObjectListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestShortSegmentedSortedMultiset.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestShortSegmentedSortedMultiset.java index fc28acaa09c..62c70c02aad 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestShortSegmentedSortedMultiset.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/ssms/TestShortSegmentedSortedMultiset.java @@ -9,13 +9,14 @@ package 
io.deephaven.engine.table.impl.ssms; import io.deephaven.base.verify.AssertionFailure; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ShiftObliviousListener; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.util.compare.ShortComparisons; import io.deephaven.util.datastructures.LongSizedDataStructure; @@ -183,8 +184,9 @@ public void onUpdate(RowSet added, RowSet removed, RowSet modified) { }; asShort.addUpdateListener(asShortListener); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (desc.advance(50)) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet[] notify = GenerateTableUpdates.computeTableUpdates(desc.tableSize(), random, table, columnInfo, allowAddition, allowRemoval, false); assertTrue(notify[2].isEmpty()); table.notifyListeners(notify[0], notify[1], notify[2]); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumMinMax.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumMinMax.java index a27e38f1cdb..e4db985c1d8 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumMinMax.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumMinMax.java @@ -1,13 +1,14 @@ package io.deephaven.engine.table.impl.updateby; import io.deephaven.api.updateby.UpdateByOperation; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.PartitionedTable; import 
io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.function.Numeric; import io.deephaven.test.types.OutOfBandTest; import org.jetbrains.annotations.NotNull; @@ -143,7 +144,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { if (appendOnly) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> generateAppends(100, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(100, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } else { simulateShiftAwareStep(100, billy, t, result.infos, nuggets); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumProd.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumProd.java index 58615a5fb24..71b09b8f28e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumProd.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumProd.java @@ -1,15 +1,16 @@ package io.deephaven.engine.table.impl.updateby; import io.deephaven.api.updateby.UpdateByControl; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.*; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.TstUtils; 
-import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.function.Numeric; import io.deephaven.test.types.OutOfBandTest; import org.jetbrains.annotations.NotNull; @@ -140,7 +141,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> generateAppends(100, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(100, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -161,7 +163,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(100, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumSum.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumSum.java index 7953d372b32..a870d389d89 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumSum.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestCumSum.java @@ -2,14 +2,15 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.*; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.TstUtils; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.function.Numeric; import io.deephaven.test.types.OutOfBandTest; import org.jetbrains.annotations.NotNull; @@ -140,7 +141,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> generateAppends(100, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(100, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -161,7 +163,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(100, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestDelta.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestDelta.java index 3f1e950f891..d04429e759f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestDelta.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestDelta.java @@ -3,17 +3,18 @@ import io.deephaven.api.updateby.DeltaControl; import io.deephaven.api.updateby.NullBehavior; import io.deephaven.api.updateby.UpdateByOperation; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; 
import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; import org.jetbrains.annotations.NotNull; @@ -256,8 +257,8 @@ private void doTestAppendOnly(boolean bucketed) { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -279,7 +280,7 @@ public void testZeroKeyGeneralTicking() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmMinMax.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmMinMax.java index 5fe43ce9bab..de8e0b0c0ef 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmMinMax.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmMinMax.java @@ -7,6 +7,7 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; 
import io.deephaven.engine.table.PartitionedTable; @@ -18,11 +19,11 @@ import io.deephaven.engine.table.impl.updateby.em.BaseBigNumberEMOperator; import io.deephaven.engine.table.impl.updateby.em.BasePrimitiveEMOperator; import io.deephaven.engine.table.impl.util.ColumnHolder; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.test.types.OutOfBandTest; import org.jetbrains.annotations.NotNull; import org.junit.Test; @@ -806,10 +807,11 @@ protected Table e() { for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { try { if (appendOnly) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - generateAppends(DYNAMIC_UPDATE_SIZE, billy, tickResult.t, tickResult.infos); - generateAppends(DYNAMIC_UPDATE_SIZE, billy, timeResult.t, timeResult.infos); - }); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> { + generateAppends(DYNAMIC_UPDATE_SIZE, billy, tickResult.t, tickResult.infos); + generateAppends(DYNAMIC_UPDATE_SIZE, billy, timeResult.t, timeResult.infos); + }); validate("Table", nuggets); validate("Table", timeNuggets); } else { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmStd.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmStd.java index c77d1fd4a00..f94db07670c 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmStd.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEmStd.java @@ -7,6 +7,7 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import 
io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.PartitionedTable; @@ -16,12 +17,12 @@ import io.deephaven.engine.table.impl.TableDefaults; import io.deephaven.engine.table.impl.locations.TableDataException; import io.deephaven.engine.table.impl.util.ColumnHolder; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.test.types.OutOfBandTest; import org.jetbrains.annotations.NotNull; @@ -860,7 +861,8 @@ protected Table e() { for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { try { if (appendOnly) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { generateAppends(DYNAMIC_UPDATE_SIZE, billy, tickResult.t, tickResult.infos); generateAppends(DYNAMIC_UPDATE_SIZE, billy, timeResult.t, timeResult.infos); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEma.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEma.java index 19853683ed8..129cb9afe5e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEma.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEma.java @@ -6,6 +6,7 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import 
io.deephaven.engine.context.QueryScope; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; @@ -15,11 +16,11 @@ import io.deephaven.engine.table.impl.TableDefaults; import io.deephaven.engine.table.impl.locations.TableDataException; import io.deephaven.engine.table.impl.util.ColumnHolder; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.string.StringUtils; import io.deephaven.numerics.movingaverages.AbstractMa; @@ -539,10 +540,11 @@ protected Table e() { for (int ii = 0; ii < 100; ii++) { try { if (appendOnly) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - generateAppends(100, billy, tickResult.t, tickResult.infos); - generateAppends(100, billy, timeResult.t, timeResult.infos); - }); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> { + generateAppends(100, billy, tickResult.t, tickResult.infos); + generateAppends(100, billy, timeResult.t, timeResult.infos); + }); validate("Table", nuggets); validate("Table", timeNuggets); } else { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEms.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEms.java index 9b95e9c5e57..7c185e39dcb 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEms.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestEms.java @@ -7,6 +7,7 @@ import io.deephaven.chunk.Chunk; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import 
io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.PartitionedTable; @@ -16,11 +17,11 @@ import io.deephaven.engine.table.impl.TableDefaults; import io.deephaven.engine.table.impl.locations.TableDataException; import io.deephaven.engine.table.impl.util.ColumnHolder; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; import org.jetbrains.annotations.NotNull; @@ -640,10 +641,11 @@ protected Table e() { for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { try { if (appendOnly) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - generateAppends(DYNAMIC_UPDATE_SIZE, billy, tickResult.t, tickResult.infos); - generateAppends(DYNAMIC_UPDATE_SIZE, billy, timeResult.t, timeResult.infos); - }); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> { + generateAppends(DYNAMIC_UPDATE_SIZE, billy, tickResult.t, tickResult.infos); + generateAppends(DYNAMIC_UPDATE_SIZE, billy, timeResult.t, timeResult.infos); + }); validate("Table", nuggets); validate("Table", timeNuggets); } else { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestForwardFill.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestForwardFill.java index 384235a8d17..92421e134ac 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestForwardFill.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestForwardFill.java @@ -1,18 +1,19 @@ package 
io.deephaven.engine.table.impl.updateby; import io.deephaven.api.updateby.UpdateByOperation; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.*; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.EvalNuggetInterface; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.function.Basic; import io.deephaven.test.types.OutOfBandTest; @@ -295,7 +296,7 @@ public Table e() { void updateAndValidate(QueryTable src, Table result, ThrowingRunnable updateFunc) throws Exception { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateFunc); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle(updateFunc); try { for (int ii = 0; ii < 2; ii++) { @@ -406,7 +407,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> generateAppends(100, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(100, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingAvg.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingAvg.java index 857b9e5bea2..f17c43e792e 100644 --- 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingAvg.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingAvg.java @@ -4,17 +4,18 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -690,8 +691,8 @@ private void doTestAppendOnly(boolean bucketed, int prevTicks, int postTicks) { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -713,8 +714,8 @@ private void doTestAppendOnlyTimed(boolean bucketed, Duration prevTime, Duration final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> 
generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -850,7 +851,7 @@ private void doTestTicking(final boolean bucketed, final long prevTicks, final l final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -873,7 +874,7 @@ private void doTestTickingTimed(final boolean bucketed, final Duration prevTime, final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingCount.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingCount.java index 08d232b20e2..6abbf383194 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingCount.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingCount.java @@ -5,17 +5,18 @@ import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.base.verify.Assert; import io.deephaven.chunk.attributes.Any; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import 
io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -635,8 +636,8 @@ private void doTestAppendOnly(boolean bucketed, int prevTicks, int postTicks) { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -659,8 +660,8 @@ private void doTestAppendOnlyTimed(boolean bucketed, Duration prevTime, Duration final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -797,7 +798,7 @@ private void doTestTicking(final boolean bucketed, final long prevTicks, final l final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + 
ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -822,7 +823,7 @@ private void doTestTickingTimed(final boolean bucketed, final Duration prevTime, final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingGroup.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingGroup.java index 3d0e51dc699..d1d86f4349b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingGroup.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingGroup.java @@ -4,18 +4,19 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; import io.deephaven.vector.*; @@ -496,8 +497,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -526,8 +527,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -664,7 +665,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -691,7 +692,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingMinMax.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingMinMax.java index 756855998be..005fe6c1633 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingMinMax.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingMinMax.java @@ -4,17 +4,18 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -924,8 +925,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -950,8 +951,8 @@ private void doTestAppendOnlyTimed(boolean bucketed, Duration prevTime, Duration final Random billy = 
new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -1090,7 +1091,7 @@ private void doTestTicking(final boolean bucketed, final long prevTicks, final l final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -1117,7 +1118,7 @@ private void doTestTickingTimed(final boolean bucketed, final Duration prevTime, final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingProduct.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingProduct.java index 893f0b846f1..e3b24089b7a 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingProduct.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingProduct.java @@ -4,17 +4,18 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.base.verify.Assert; +import 
io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -729,8 +730,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -758,8 +759,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -901,7 +902,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + 
ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -931,7 +932,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingStd.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingStd.java index 8e87acedda8..e4f2a635c95 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingStd.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingStd.java @@ -4,17 +4,18 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.CharGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import 
io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -716,8 +717,8 @@ private void doTestAppendOnly(boolean bucketed, int prevTicks, int postTicks) { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -739,8 +740,8 @@ private void doTestAppendOnlyTimed(boolean bucketed, Duration prevTime, Duration final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -876,7 +877,7 @@ private void doTestTicking(final boolean bucketed, final long prevTicks, final l final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -899,7 +900,7 @@ private void doTestTickingTimed(final boolean bucketed, final Duration prevTime, final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, 
t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingSum.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingSum.java index ce8053ecdbd..9619f2cc125 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingSum.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingSum.java @@ -2,18 +2,19 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.TableDefaults; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.testutil.generator.TestDataGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -520,7 +521,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> generateAppends(100, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(100, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -582,7 +584,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int 
ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> generateAppends(100, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(100, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } } @@ -641,7 +644,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(100, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -666,7 +669,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(100, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -688,7 +691,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(100, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -710,7 +713,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < 100; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(100, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingWAvg.java 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingWAvg.java index b4928ef9f0e..d3c72ecd90d 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingWAvg.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestRollingWAvg.java @@ -4,15 +4,16 @@ import io.deephaven.api.updateby.UpdateByControl; import io.deephaven.api.updateby.UpdateByOperation; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.GenerateTableUpdates; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.generator.*; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -1035,8 +1036,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } @@ -1071,8 +1072,8 @@ protected Table e() { billy.setSeed(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t2, result2.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> 
generateAppends(DYNAMIC_UPDATE_SIZE, billy, t2, result2.infos)); TstUtils.validate("Table", nuggets); } } @@ -1109,8 +1110,8 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table", nuggets); } @@ -1147,8 +1148,8 @@ protected Table e() { billy.setSeed(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t2, result2.infos)); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> generateAppends(DYNAMIC_UPDATE_SIZE, billy, t2, result2.infos)); TstUtils.validate("Table", nuggets); } } @@ -1297,7 +1298,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -1331,7 +1332,7 @@ protected Table e() { billy.setSeed(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t2, result2.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -1368,7 +1369,7 @@ protected Table e() { final Random billy = new Random(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t, result.infos)); TstUtils.validate("Table - step " + ii, nuggets); } @@ -1405,7 +1406,7 @@ protected Table e() { billy.setSeed(0xB177B177); for (int ii = 0; ii < DYNAMIC_UPDATE_STEPS; ii++) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(DYNAMIC_UPDATE_SIZE, billy, t2, result2.infos)); TstUtils.validate("Table - step " + ii, nuggets); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestUpdateByGeneral.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestUpdateByGeneral.java index 76ebbe52925..8aa9e52f4ea 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestUpdateByGeneral.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/updateby/TestUpdateByGeneral.java @@ -4,10 +4,12 @@ import io.deephaven.api.updateby.BadDataBehavior; import io.deephaven.api.updateby.OperationControl; import io.deephaven.api.updateby.UpdateByOperation; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.UpdateErrorReporter; import io.deephaven.engine.table.impl.util.AsyncClientErrorNotifier; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.table.impl.TableDefaults; import io.deephaven.api.updateby.UpdateByControl; @@ -16,7 +18,6 @@ import io.deephaven.engine.testutil.generator.TestDataGenerator; import io.deephaven.engine.testutil.generator.SortedInstantGenerator; import io.deephaven.engine.updategraph.TerminalNotification; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import io.deephaven.test.types.OutOfBandTest; @@ -173,9 +174,10 @@ public EnumSet diffItems() { for (int step = 0; step < steps; step++) { try { if (appendOnly) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - generateAppends(stepSize, result.random, result.t, result.infos); - }); + ExecutionContext.getContext().getUpdateGraph().cast().runWithinUnitTestCycle( + () -> { + generateAppends(stepSize, result.random, result.t, result.infos); + }); validate("Table", nuggets); } else { simulateShiftAwareStep(stepSize, result.random, result.t, result.infos, nuggets); @@ -205,22 +207,27 @@ public void testNewBuckets() { final QueryTable result = (QueryTable) table.updateBy( List.of(UpdateByOperation.Fill("Filled=Int"), UpdateByOperation.RollingSum(2, "Sum=Int")), "Key"); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Add to "B" bucket + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 8)); // Add to "B" bucket table.notifyListeners(i(8), i(), i()); }); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // New "C" bucket in isolation + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(9), col("Key", "C"), intCol("Int", 10)); // New "C" bucket in isolation table.notifyListeners(i(9), i(), i()); }); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Row from "B" bucket to "C" bucket + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); // Row from "B" bucket to "C" bucket table.notifyListeners(i(), i(), i(8)); }); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // New "D" bucket + updateGraph.runWithinUnitTestCycle(() -> { 
TstUtils.addToTable(table, i(10, 11), col("Key", "D", "C"), intCol("Int", 10, 11)); // New "D" bucket table.notifyListeners(i(10, 11), i(), i()); }); @@ -304,7 +311,7 @@ public void testInMemoryColumn() { @Override public void reportUpdateError(Throwable t) { - UpdateGraphProcessor.DEFAULT.addNotification(new TerminalNotification() { + ExecutionContext.getContext().getUpdateGraph().addNotification(new TerminalNotification() { @Override public void run() { System.err.println("Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/CumulativeUtilTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/CumulativeUtilTest.java index 13655ff947b..528e93106b6 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/CumulativeUtilTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/CumulativeUtilTest.java @@ -3,28 +3,13 @@ */ package io.deephaven.engine.table.impl.util; -import io.deephaven.base.testing.BaseArrayTestCase; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.util.QueryConstants; import io.deephaven.engine.util.TableTools; -import io.deephaven.util.SafeCloseable; -public class CumulativeUtilTest extends BaseArrayTestCase { - private SafeCloseable executionContext; - - @Override - protected void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } +public class CumulativeUtilTest extends RefreshingTableTestCase { public void testCumSum() { final Table t = TableTools.emptyTable(10).updateView("Row = i"); diff --git 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionLockFreeTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionLockFreeTest.java index 8801ff96a16..82791bbcb64 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionLockFreeTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionLockFreeTest.java @@ -3,7 +3,8 @@ */ package io.deephaven.engine.table.impl.util; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.updategraph.LogicalClock; import gnu.trove.list.array.TLongArrayList; @@ -24,7 +25,7 @@ public class RowRedirectionLockFreeTest extends RefreshingTableTestCase { public void testRowRedirection() throws InterruptedException { final WritableRowRedirectionLockFree index = new RowRedirectionLockFreeFactory().createRowRedirection(10); index.startTrackingPrevValues(); - final long initialStep = LogicalClock.DEFAULT.currentStep(); + final long initialStep = ExecutionContext.getContext().getUpdateGraph().clock().currentStep(); Writer writer = new Writer("writer", initialStep, index); Reader r0 = new Reader("reader0", initialStep, index); Reader r1 = new Reader("reader1", initialStep, index); @@ -111,7 +112,7 @@ private static class Reader extends RWBase { @Override protected final void doOneIteration() { // Figure out what step we're in and what step to read from (current or prev). 
- final long logicalClockStartValue = LogicalClock.DEFAULT.currentValue(); + final long logicalClockStartValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(); final long stepFromCycle = LogicalClock.getStep(logicalClockStartValue); final LogicalClock.State state = LogicalClock.getState(logicalClockStartValue); final long step = state == LogicalClock.State.Updating ? stepFromCycle - 1 : stepFromCycle; @@ -152,7 +153,7 @@ protected final void doOneIteration() { } - final long logicalClockEndValue = LogicalClock.DEFAULT.currentValue(); + final long logicalClockEndValue = ExecutionContext.getContext().getUpdateGraph().clock().currentValue(); if (logicalClockStartValue != logicalClockEndValue) { ++incoherentCycles; return; @@ -194,21 +195,23 @@ private static class Writer extends RWBase { @Override protected final void doOneIteration() { final MutableInt keysInThisGeneration = new MutableInt(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - final long step = LogicalClock.DEFAULT.currentStep(); + // A bit of a waste because we only look at the first 'numKeysToInsert' keys, but that's ok. + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + final long step = updateGraph.clock().currentStep(); keysInThisGeneration.setValue((int) ((step - initialStep) * 1000 + 1000)); final Random rng = new Random(step); final int numKeysToInsert = rng.nextInt(keysInThisGeneration.getValue()); // A bit of a waste because we only look at the first 'numKeysToInsert' keys, but that's ok. 
long[] keys = fillAndShuffle(rng, keysInThisGeneration.getValue()); final WritableRowRedirectionLockFree ix = index; - for (int ii = 0; ii < numKeysToInsert; ++ii) { - final long key = keys[ii]; - final long value = step * oneBillion + ii; + for (int ii1 = 0; ii1 < numKeysToInsert; ++ii1) { + final long key = keys[ii1]; + final long value = step * oneBillion + ii1; ix.put(key, value); } - for (int ii = numKeysToInsert; ii < keys.length; ++ii) { - final long key = keys[ii]; + for (int ii1 = numKeysToInsert; ii1 < keys.length; ++ii1) { + final long key = keys[ii1]; ix.remove(key); } }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionTest.java index 15267290957..09b037c6635 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowRedirectionTest.java @@ -3,7 +3,8 @@ */ package io.deephaven.engine.table.impl.util; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.io.logger.Logger; import io.deephaven.internal.log.LoggerFactory; @@ -26,20 +27,21 @@ public void testBasic() { } rowRedirection.startTrackingPrevValues(); rowRedirection1.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - for (int i = 0; i < 3; i++) { - rowRedirection1.put(i * 2, i * 3); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + for (int i1 = 0; i1 < 3; i1++) { + rowRedirection1.put(i1 * 2, i1 * 3); } - for (int i = 0; i < 3; i++) { - assertEquals(i * 2, rowRedirection.get(i)); - assertEquals(i * 2, 
rowRedirection.getPrev(i)); + for (int i1 = 0; i1 < 3; i1++) { + assertEquals(i1 * 2, rowRedirection.get(i1)); + assertEquals(i1 * 2, rowRedirection.getPrev(i1)); - assertEquals(i * 3, rowRedirection1.get(i * 2)); - assertEquals(rowRedirection1.getPrev(i * 2), i * 4); + assertEquals(i1 * 3, rowRedirection1.get(i1 * 2)); + assertEquals(rowRedirection1.getPrev(i1 * 2), i1 * 4); } }); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { for (int i = 0; i < 3; i++) { rowRedirection.put((i + 1) % 3, i * 2); } @@ -65,25 +67,28 @@ public void testContiguous() { // As of startTrackingPrevValues, get() and getPrev() should both be returning 100 + ii * 2 rowRedirection.startTrackingPrevValues(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - for (int ii = 0; ii < 100; ++ii) { - assertEquals(100 + ii * 2, rowRedirection.get(ii)); + // Now set current values to 200 + ii * 3 + // Confirm that get() returns 200 + ii * 3; meanwhile getPrev() still returns 100 + ii * 2 + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { + for (int ii1 = 0; ii1 < 100; ++ii1) { + assertEquals(100 + ii1 * 2, rowRedirection.get(ii1)); } - for (int ii = 0; ii < 100; ++ii) { - assertEquals(100 + ii * 2, rowRedirection.getPrev(ii)); + for (int ii1 = 0; ii1 < 100; ++ii1) { + assertEquals(100 + ii1 * 2, rowRedirection.getPrev(ii1)); } // Now set current values to 200 + ii * 3 - for (int ii = 0; ii < 100; ++ii) { - rowRedirection.put(ii, 200 + ii * 3); + for (int ii1 = 0; ii1 < 100; ++ii1) { + rowRedirection.put(ii1, 200 + ii1 * 3); } // Confirm that get() returns 200 + ii * 3; meanwhile getPrev() still returns 100 + ii * 2 - for (int ii = 0; ii < 100; ++ii) { - assertEquals(200 + ii * 3, rowRedirection.get(ii)); + for (int ii1 = 0; ii1 < 100; ++ii1) { + assertEquals(200 + ii1 * 3, rowRedirection.get(ii1)); } - for (int ii = 0; ii < 100; ++ii) { - 
assertEquals(100 + ii * 2, rowRedirection.getPrev(ii)); + for (int ii1 = 0; ii1 < 100; ++ii1) { + assertEquals(100 + ii1 * 2, rowRedirection.getPrev(ii1)); } }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowSetShiftDataExpanderTest.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowSetShiftDataExpanderTest.java index d4d270837d3..e4bef7a17bc 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowSetShiftDataExpanderTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/RowSetShiftDataExpanderTest.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.TrackingWritableRowSet; import io.deephaven.engine.rowset.WritableRowSet; @@ -10,15 +11,21 @@ import io.deephaven.engine.table.TableUpdate; import io.deephaven.engine.table.impl.TableUpdateImpl; import io.deephaven.engine.table.ModifiedColumnSet; -import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.engine.updategraph.LogicalClockImpl; +import net.bytebuddy.build.Plugin; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import static org.junit.Assert.assertEquals; public class RowSetShiftDataExpanderTest { + @Rule + final public EngineCleanup framework = new EngineCleanup(); + /** * These tests names have a few qualities worth defining: - Major Shift: shift with no overlap in keyspace before * and after (i.e. 
modified RowSet from shift alone is empty) - Minor Shift: shift with overlap in keyspace before @@ -512,14 +519,17 @@ private static class Context { public final WritableRowSet expectRemoved = RowSetFactory.empty(); public final WritableRowSet expectModified = RowSetFactory.empty(); + private final LogicalClockImpl clock; + public Context() { - LogicalClock.DEFAULT.resetForUnitTests(); + clock = (LogicalClockImpl) ExecutionContext.getContext().getUpdateGraph().clock(); + clock.resetForUnitTests(); } public void validate() { - LogicalClock.DEFAULT.startUpdateCycle(); + clock.startUpdateCycle(); sourceRowSet.update(expectAdded, expectRemoved); - LogicalClock.DEFAULT.completeUpdateCycle(); + clock.completeUpdateCycle(); final RowSetShiftData shiftData = shifted.build(); shiftData.validate(); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestColumnsToRowsTransform.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestColumnsToRowsTransform.java index 1e54af4b90e..875c025ec37 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestColumnsToRowsTransform.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestColumnsToRowsTransform.java @@ -4,6 +4,7 @@ package io.deephaven.engine.table.impl.util; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.primitive.iterator.CloseableIterator; import io.deephaven.engine.primitive.iterator.CloseablePrimitiveIteratorOfInt; import io.deephaven.engine.table.ChunkSource; @@ -17,7 +18,6 @@ import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.*; @@ -189,27 
+189,24 @@ private void testIncremental(int seed) { .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne` || Value > 50000")), new QueryTableTestBase.TableComparator( ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3"), - UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") - .dropColumns("I1", "I2", "I3").ungroup())), + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> queryTable + .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") + .dropColumns("I1", "I2", "I3").ungroup())), new QueryTableTestBase.TableComparator( ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") .updateView("MappedVal=nameMap.get(Name)") .where("MappedVal in `EyeOne` || Value > 50000"), - UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") - .dropColumns("I1", "I2", "I3").ungroup()) + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> queryTable + .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") + .dropColumns("I1", "I2", "I3").ungroup()) .updateView("MappedVal=nameMap.get(Name)") .where("MappedVal in `EyeOne` || Value > 50000")), new QueryTableTestBase.TableComparator( ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne`"), - UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") - .dropColumns("I1", "I2", "I3").ungroup()) + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> queryTable + .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") + 
.dropColumns("I1", "I2", "I3").ungroup()) .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne`")), EvalNugget.from(() -> ColumnsToRowsTransform .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3").where("Value > 50000")), @@ -220,11 +217,10 @@ private void testIncremental(int seed) { ColumnsToRowsTransform.columnsToRows(queryTable, "Name", new String[] {"IV", "DV"}, new String[] {"First", "Second", "Third"}, new String[][] {new String[] {"I1", "I2", "I3"}, new String[] {"D1", "D2", "D3"}}), - UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`First`, `Second`, `Third`}", - "IV=new int[]{I1, I2, I3}", "DV=new double[]{D1, D2, D3}") - .dropColumns("I1", "I2", "I3", "D1", "D2", "D3").ungroup())), + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> queryTable + .update("Name=new String[]{`First`, `Second`, `Third`}", + "IV=new int[]{I1, I2, I3}", "DV=new double[]{D1, D2, D3}") + .dropColumns("I1", "I2", "I3", "D1", "D2", "D3").ungroup())), new QueryTableTestBase.TableComparator( ColumnsToRowsTransform .columnsToRows(queryTable, "Name", new String[] {"IV", "DV"}, @@ -232,7 +228,7 @@ private void testIncremental(int seed) { new String[][] {new String[] {"I1", "I2", "I3"}, new String[] {"D1", "D2", "D3"}}) .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `AiTwo`"), - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> queryTable + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> queryTable .update("Name=new String[]{`First`, `Second`, `Third`}", "IV=new int[]{I1, I2, I3}", "DV=new double[]{D1, D2, D3}") .dropColumns("I1", "I2", "I3", "D1", "D2", "D3").ungroup() diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestDynamicTableWriter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestDynamicTableWriter.java index 38c5da7c41b..c55ff37be2c 100644 --- 
a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestDynamicTableWriter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestDynamicTableWriter.java @@ -3,9 +3,10 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.UpdateSourceQueryTable; import io.deephaven.engine.testutil.TstUtils; @@ -45,7 +46,8 @@ public void testTypes() throws IOException { writer.getSetter("DTC", Instant.class).set(DateTimeUtils.parseInstant("2020-09-16T07:55:00 NY")); writer.getSetter("BIC", BigInteger.class).set(BigInteger.valueOf(8)); writer.writeRow(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(result::run); final Table expected1 = newTable(byteCol("BC", (byte) 1), charCol("CC", 'A'), @@ -91,7 +93,7 @@ public void testTypes() throws IOException { row2.setFlags(Row.Flags.StartTransaction); row2.writeRow(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); TstUtils.assertTableEquals(expected1, result); final Row row3 = writer.getRowWriter(); @@ -109,7 +111,7 @@ public void testTypes() throws IOException { row3.setFlags(Row.Flags.EndTransaction); row3.writeRow(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); final Table expected2 = newTable(byteCol("BC", (byte) 1, (byte) 17, (byte) 25), charCol("CC", 'A', 'C', 'D'), @@ -143,7 +145,8 @@ public void testNulls() throws IOException { writer.getSetter("LC").setLong(4); 
writer.getSetter("FC").setFloat(5.5f); writer.writeRow(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(result::run); final Table expected1 = newTable(byteCol("BC", (byte) 1), charCol("CC", 'A'), @@ -166,7 +169,7 @@ public void testNulls() throws IOException { row.setFlags(Row.Flags.SingleRow); row.writeRow(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); final Table expected2 = newTable(byteCol("BC", QueryConstants.NULL_BYTE), charCol("CC", QueryConstants.NULL_CHAR), @@ -193,7 +196,8 @@ public void testTransactions() throws IOException { addRow(writer, Row.Flags.SingleRow, "Fred", 1); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(result::run); final Table lonelyFred = TableTools.newTable(TableTools.stringCol("A", "Fred"), TableTools.intCol("B", 1)); @@ -202,29 +206,29 @@ public void testTransactions() throws IOException { addRow(writer, Row.Flags.StartTransaction, "Barney", 2); addRow(writer, Row.Flags.None, "Betty", 3); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); TstUtils.assertTableEquals(lonelyFred, result); addRow(writer, Row.Flags.EndTransaction, "Bam-Bam", 4); TstUtils.assertTableEquals(lonelyFred, result); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); final Table withRubbles = TableTools.newTable( TableTools.stringCol("A", "Fred", "Barney", "Betty", "Bam-Bam"), TableTools.intCol("B", 1, 2, 3, 4)); TstUtils.assertTableEquals(withRubbles, result); addRow(writer, Row.Flags.StartTransaction, "Wilma", 5); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); TstUtils.assertTableEquals(withRubbles, result); addRow(writer, Row.Flags.StartTransaction, "Pebbles", 6); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); TstUtils.assertTableEquals(withRubbles, result); addRow(writer, Row.Flags.EndTransaction, "Wilma", 7); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(result::run); + updateGraph.runWithinUnitTestCycle(result::run); final Table allTogether = TableTools.newTable(TableTools.stringCol("A", "Fred", "Barney", "Betty", "Bam-Bam", "Pebbles", "Wilma"), TableTools.intCol("B", 1, 2, 3, 4, 6, 7)); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFreezeBy.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFreezeBy.java index 3031d8263c3..98c4bf8295d 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFreezeBy.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFreezeBy.java @@ -4,11 +4,12 @@ package io.deephaven.engine.table.impl.util; import io.deephaven.api.Selectable; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.context.QueryScope; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.*; @@ -52,7 +53,8 @@ public void testSimpleTypes() { assertEquals(Instant.class, frozen.getDefinition().getColumn("SInstant").getDataType()); assertEquals(Boolean.class, frozen.getDefinition().getColumn("SBoolean").getDataType()); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(input, i(0)); TstUtils.addToTable(input, i(2), stringCol("Key", "C"), intCol("Sentinel", 4)); input.notifyListeners(i(), i(0), i(2)); @@ -62,7 +64,7 @@ public void testSimpleTypes() { assertTableEquals(TableTools.newTable(stringCol("Key", "B", "C"), intCol("Sentinel", 2, 3)) .updateView(Selectable.from(updates)), frozen); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(input, i(3, 4), stringCol("Key", "D", "A"), intCol("Sentinel", 5, 6)); input.notifyListeners(i(3, 4), i(), i()); }); @@ -72,7 +74,7 @@ public void testSimpleTypes() { .updateView(Selectable.from(updates)), frozen); // swap two keys - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(input, i(3, 4), stringCol("Key", "A", "D"), intCol("Sentinel", 7, 8)); input.notifyListeners(i(), i(), i(4, 3)); }); @@ -97,7 +99,8 @@ public void testCompositeKeys() { assertTableEquals(input, frozen); // swap two keys - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(input, i(0, 4), stringCol("Key", "A", "D"), intCol("Key2", 101, 101), intCol("Sentinel", 4, 5)); input.notifyListeners(i(4), i(), i(0)); @@ -113,7 +116,8 @@ public void testNoKeys() { final Table frozen = FreezeBy.freezeBy(input); showWithRowSet(frozen); - final Table originalExpect = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(input::snapshot); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Table originalExpect = 
updateGraph.sharedLock().computeLocked(input::snapshot); assertTableEquals(input, originalExpect); final TableUpdateValidator tuv = TableUpdateValidator.make("frozen", (QueryTable) frozen); @@ -121,7 +125,7 @@ public void testNoKeys() { tuv.getResultTable().addUpdateListener(failureListener); assertTableEquals(input, frozen); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(input, i(0)); TstUtils.addToTable(input, i(2), stringCol("Key", "C"), intCol("Sentinel", 4)); input.notifyListeners(i(2), i(0), i()); @@ -129,37 +133,37 @@ public void testNoKeys() { showWithRowSet(frozen); assertTableEquals(originalExpect, frozen); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(input, i(2), stringCol("Key", "D"), intCol("Sentinel", 5)); input.notifyListeners(i(), i(), i(2)); }); showWithRowSet(frozen); assertTableEquals(originalExpect, frozen); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(input, i(2)); input.notifyListeners(i(), i(2), i()); }); showWithRowSet(frozen); assertTableEquals(originalExpect.head(0), frozen); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(input, i(2), stringCol("Key", "E"), intCol("Sentinel", 6)); input.notifyListeners(i(2), i(), i()); }); showWithRowSet(frozen); - final Table newExpect = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(input::snapshot); + final Table newExpect = updateGraph.sharedLock().computeLocked(input::snapshot); assertTableEquals(input, newExpect); assertTableEquals(newExpect, frozen); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(input, i(3), stringCol("Key", "F"), intCol("Sentinel", 7)); TstUtils.removeRows(input, i(2)); 
input.notifyListeners(i(3), i(2), i()); }); assertTableEquals(newExpect, frozen); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(input, i(3), stringCol("Key", "G"), intCol("Sentinel", 8)); input.notifyListeners(i(), i(), i(3)); }); @@ -179,7 +183,8 @@ public void testDuplicates() { final Table frozen = FreezeBy.freezeBy(input, "Key"); assertTableEquals(input, frozen); allowingError(() -> { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(input, i(3), stringCol("Key", "A"), intCol("Sentinel", 4)); input.notifyListeners(i(3), i(), i()); }); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFunctionBackedTableFactory.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFunctionBackedTableFactory.java index fda1f17263c..47d4547798a 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFunctionBackedTableFactory.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestFunctionBackedTableFactory.java @@ -3,6 +3,7 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; import io.deephaven.engine.testutil.generator.IntGenerator; @@ -10,7 +11,6 @@ import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.table.impl.*; @@ -37,8 +37,10 @@ public void testIterative() { new QueryTableTest.TableComparator(functionBacked, queryTable), // Note: disable update 
validation since the function backed table's prev values will always be // incorrect - EvalNugget.from(() -> UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> functionBacked.update("Mult=intCol * doubleCol"))), + EvalNugget.from(() -> { + return ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> functionBacked.update("Mult=intCol * doubleCol")); + }), }; for (int i = 0; i < 75; i++) { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestHashSetBackedTableFactory.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestHashSetBackedTableFactory.java index 113e3bc13ad..873d9ff4ae8 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestHashSetBackedTableFactory.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestHashSetBackedTableFactory.java @@ -3,16 +3,13 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.StringGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.engine.testutil.EvalNuggetInterface; import io.deephaven.engine.table.impl.QueryTable; -import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.testutil.UpdateValidatorNugget; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.util.TableTools; import io.deephaven.tuple.ArrayTuple; @@ -52,18 +49,15 @@ public void testIterative() { final Random random = new Random(); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - new EvalNugget() { - public Table e() { - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> 
result.update("Arg0=Arg.substring(0, 1)")); - } - }, + EvalNugget.from(() -> ExecutionContext.getContext().getUpdateGraph().exclusiveLock().computeLocked( + () -> result.update("Arg0=Arg.substring(0, 1)"))), new UpdateValidatorNugget(result), }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 0; ii < 1000; ++ii) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final int additions = random.nextInt(4); final int removals = random.nextInt(4); for (int jj = 0; jj < removals; ++jj) { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java index 671f1f02456..60a94df615f 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestIncrementalReleaseFilter.java @@ -3,13 +3,13 @@ */ package io.deephaven.engine.table.impl.util; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.table.Table; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.select.AutoTuningIncrementalReleaseFilter; import io.deephaven.engine.table.impl.select.IncrementalReleaseFilter; -import io.deephaven.util.annotations.ScriptApi; import junit.framework.TestCase; import java.util.List; @@ -29,8 +29,9 @@ public void testSimple() { TableTools.show(filtered); assertEquals(2, filtered.size()); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 0; ii <= 10; ++ii) { - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); TableTools.show(filtered); assertEquals(Math.min(3 + ii, 10), filtered.size()); @@ -54,8 +55,9 @@ public void testBigTable() { assertEquals(2, filtered.size()); int cycles = 0; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (filtered.size() < source.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); cycles++; } assertTableEquals(source, filtered); @@ -88,8 +90,10 @@ public void testAutoTune() { } public void testAutoTune2() { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + // I just want to see commas in the output - UpdateGraphProcessor.DEFAULT.setTargetCycleDurationMillis(100); + updateGraph.setTargetCycleDurationMillis(100); final Table source = TableTools.emptyTable(1_000_000); final AutoTuningIncrementalReleaseFilter incrementalReleaseFilter = @@ -97,12 +101,12 @@ public void testAutoTune2() { incrementalReleaseFilter.start(); final Table filtered = source.where(incrementalReleaseFilter); - final Table updated = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> filtered.update("I=0")); + final Table updated = updateGraph.sharedLock().computeLocked(() -> filtered.update("I=0")); int steps = 0; while (filtered.size() < source.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); if (steps++ > 100) { TestCase.fail("Did not release rows promptly."); } @@ -112,7 +116,8 @@ public void testAutoTune2() { } private int testAutoTuneCycle(int cycleTime) { - UpdateGraphProcessor.DEFAULT.setTargetCycleDurationMillis(cycleTime); + final ControlledUpdateGraph updateGraph = 
ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.setTargetCycleDurationMillis(cycleTime); final Table source = TableTools.emptyTable(10_000); TableTools.show(source); @@ -121,12 +126,12 @@ private int testAutoTuneCycle(int cycleTime) { incrementalReleaseFilter.start(); final Table filtered = source.updateView("I = ii").where(incrementalReleaseFilter); - final Table updated = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> filtered + final Table updated = updateGraph.sharedLock().computeLocked(() -> filtered .update("I=io.deephaven.engine.table.impl.util.TestIncrementalReleaseFilter.sleepValue(100000, I)")); int cycles = 0; while (filtered.size() < source.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::run); + updateGraph.runWithinUnitTestCycle(incrementalReleaseFilter::run); System.out.println(filtered.size() + " / " + updated.size()); if (cycles++ > (2 * (source.size() * 100) / cycleTime)) { TestCase.fail("Did not release rows promptly."); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestKeyedArrayBackedMutableTable.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestKeyedArrayBackedMutableTable.java index f826ac37edf..14fcc5a3c69 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestKeyedArrayBackedMutableTable.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestKeyedArrayBackedMutableTable.java @@ -6,8 +6,9 @@ import io.deephaven.UncheckedDeephavenException; import io.deephaven.base.SleepUtil; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.util.config.InputTableStatusListener; import 
io.deephaven.engine.util.config.MutableInputTable; @@ -164,7 +165,8 @@ public void testAddRows() throws Throwable { mutableInputTable.addRow(randyMap, true, listener); SleepUtil.sleep(100); listener.assertIncomplete(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(kabut::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(kabut::run); assertTableEquals(TableTools.merge(input, input2), kabut); listener.waitForCompletion(); listener.assertSuccess(); @@ -177,7 +179,7 @@ public void testAddRows() throws Throwable { mutableInputTable.addRow(randyMap2, false, listener2); SleepUtil.sleep(100); listener2.assertIncomplete(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(kabut::run); + updateGraph.runWithinUnitTestCycle(kabut::run); assertTableEquals(TableTools.merge(input, input2), kabut); listener2.waitForCompletion(); listener2.assertFailure(IllegalArgumentException.class, "Can not edit keys Randy"); @@ -236,7 +238,8 @@ public void testSetRows() { CollectionUtil.mapFromArray(String.class, Object.class, "Name", "George", "Employer", "Cogswell"); mutableInputTable.setRow(defaultValues, 0, cogMap); SleepUtil.sleep(100); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(kabut::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(kabut::run); assertTableEquals(TableTools.merge(input, ex2).lastBy("Name"), kabut); } @@ -299,8 +302,11 @@ private void handleDelayedRefresh(final BaseArrayBackedMutableTable table, table.setOnPendingChange(gate::countDown); try { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); refreshThread = new Thread(() -> { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // If this unexpected interruption happens, the test thread may hang in action.run() + // indefinitely. 
Best to hope it's already queued the pending action and proceed with run. + updateGraph.runWithinUnitTestCycle(() -> { try { gate.await(); } catch (InterruptedException ignored) { diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestRedirectedColumnSource.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestRedirectedColumnSource.java index f8a5d4002c0..288f6800b87 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestRedirectedColumnSource.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestRedirectedColumnSource.java @@ -3,11 +3,12 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.context.QueryScope; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.sources.RedirectedColumnSource; import io.deephaven.util.BooleanUtils; @@ -107,9 +108,10 @@ public void testFillChunk() { final IncrementalReleaseFilter incFilter = new IncrementalReleaseFilter(stepSz, stepSz); final Table live = t.where(incFilter).sort("IntsCol"); final int chunkSz = stepSz - 7; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); try (final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(chunkSz)) { while (live.size() < t.size()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(incFilter::run); + updateGraph.runWithinUnitTestCycle(incFilter::run); doFillAndCheck(live, "StringsCol", chunk, chunkSz); } } @@ -137,11 +139,11 @@ public void testIds6196() { TstUtils.testRefreshingTable(RowSetFactory.flat(6).toTracking(), intCol("IntVal", 0, 1, 2, 3, 4, 5)); - final Table 
a = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( + final Table a = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( () -> qt.update("I2=3+IntVal", "BoolVal=ids6196_values[IntVal % ids6196_values.length]")); showWithRowSet(a); - final Table b = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> a.naturalJoin(a, "I2=IntVal", "BoolVal2=BoolVal")); + final Table b = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> a.naturalJoin(a, "I2=IntVal", "BoolVal2=BoolVal")); showWithRowSet(b); final TByteList byteList = new TByteArrayList(6); @@ -166,8 +168,8 @@ public void testIds6196() { assertArrayEquals(expecteds, chunkResult); } - final Table c = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> a.naturalJoin(b, "I2=IntVal", "BoolVal3=BoolVal2")); + final Table c = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> a.naturalJoin(b, "I2=IntVal", "BoolVal3=BoolVal2")); showWithRowSet(c); final ColumnSource reinterpretedC = c.getColumnSource("BoolVal3").reinterpret(byte.class); byteList.clear(); @@ -203,14 +205,15 @@ public void testIds6196() { assertArrayEquals(nullBytes, chunkResult); } - final Table captured = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(c::snapshot); + final Table captured = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(c::snapshot); showWithRowSet(captured); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); TstUtils.addToTable(qt, RowSetFactory.flat(3), intCol("IntVal", 1, 2, 3)); qt.notifyListeners(RowSetFactory.empty(), RowSetFactory.empty(), RowSetFactory.flat(3)); - UpdateGraphProcessor.DEFAULT.flushAllNormalNotificationsForUnitTests(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.flushAllNormalNotificationsForUnitTests(); 
System.out.println("A:"); showWithRowSet(a); @@ -232,6 +235,6 @@ public void testIds6196() { }); assertArrayEquals(expecteds, byteList.toArray()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); } } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestSyncTableFilter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestSyncTableFilter.java index dbc6a185879..207e66f4ce6 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestSyncTableFilter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestSyncTableFilter.java @@ -3,15 +3,15 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.impl.*; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.util.QueryConstants; import junit.framework.TestCase; @@ -38,10 +38,11 @@ public void testSimple() { final QueryTable b = TstUtils.testRefreshingTable(longCol("ID", 0, 0, 2, 2, 4, 4), intCol("Sentinel", 201, 202, 203, 204, 205, 206), col("Key", "a", "a", "a", "a", "a", "a")); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); final SyncTableFilter.Builder builder = new SyncTableFilter.Builder("ID"); builder.addTable("a", a); builder.addTable("b", b); - final Map result = 
UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(builder::build); + final Map result = updateGraph.sharedLock().computeLocked(builder::build); assertEquals(Set.of("a", "b"), result.keySet()); @@ -57,7 +58,7 @@ public void testSimple() { assertTableEquals(fa, ex1a); assertTableEquals(fb, ex1b); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); a.notifyListeners(i(10, 11), i(), i()); }); @@ -68,7 +69,7 @@ public void testSimple() { final Table ex2a = newTable(longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); final Table ex2b = newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); b.notifyListeners(i(10, 11), i(), i()); }); @@ -89,7 +90,8 @@ public void testSimpleAddAgain() { final SyncTableFilter.Builder builder = new SyncTableFilter.Builder("ID"); builder.addTable("a", a); builder.addTable("b", b); - final Map result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(builder::build); + final Map result = + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(builder::build); assertEquals(Set.of("a", "b"), result.keySet()); @@ -105,7 +107,8 @@ public void testSimpleAddAgain() { assertTableEquals(fa, ex1a); assertTableEquals(fb, ex1b); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); a.notifyListeners(i(10, 11), i(), i()); }); @@ -117,7 +120,7 @@ 
public void testSimpleAddAgain() { col("Key", "b", "b", "c", "c")); final Table ex2b = newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); b.notifyListeners(i(10, 11), i(), i()); TstUtils.addToTable(a, i(12, 13), longCol("ID", 5, 5), intCol("Sentinel", 109, 110), col("Key", "c", "c")); @@ -133,7 +136,7 @@ public void testSimpleAddAgain() { final Table ex3b = newTable(longCol("ID", 5, 5, 5), intCol("Sentinel", 207, 208, 209), col("Key", "a", "a", "a")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(b, i(12, 13), longCol("ID", 5, 6), intCol("Sentinel", 209, 210), col("Key", "a", "a")); b.notifyListeners(i(12, 13), i(), i()); }); @@ -144,7 +147,7 @@ public void testSimpleAddAgain() { assertTableEquals(fa, ex2a); assertTableEquals(fb, ex3b); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(a, i(14, 15), longCol("ID", 5, 6), intCol("Sentinel", 111, 112), col("Key", "a", "a")); a.notifyListeners(i(14, 15), i(), i()); }); @@ -170,7 +173,8 @@ public void testNullAppearance() { final SyncTableFilter.Builder builder = new SyncTableFilter.Builder().defaultId("ID").defaultKeys() .addTable("a", a) .addTable("b", b, "ID"); - final Map result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(builder::build); + final Map result = + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(builder::build); assertEquals(Set.of("a", "b"), result.keySet()); @@ -185,7 +189,8 @@ public void testNullAppearance() { assertTableEquals(fa, empty); assertTableEquals(fb, empty); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph 
updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); TstUtils.addToTable(a, i(2, 3), longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); a.notifyListeners(i(10, 11), i(), i(2, 3)); @@ -200,7 +205,7 @@ public void testNullAppearance() { final Table ex2a = newTable(longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); final Table ex2b = newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); b.notifyListeners(i(10, 11), i(), i()); }); @@ -222,7 +227,8 @@ public void testSimpleKeyed() { final SyncTableFilter.Builder builder = new SyncTableFilter.Builder(); builder.addTable("a", a, "ID", "Key"); builder.addTable("b", b, "Ego", "Klyuch"); - final Map result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(builder::build); + final Map result = + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(builder::build); assertEquals(Set.of("a", "b"), result.keySet()); @@ -247,7 +253,8 @@ public void testSimpleKeyed() { assertTableEquals(fa, ex1a); assertTableEquals(fb, ex1b); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(b, i(10, 11), longCol("Ego", 5, 5), intCol("Sentinel", 207, 208), col("Klyuch", "b", "c")); b.notifyListeners(i(10, 11), i(), i()); @@ -261,7 +268,7 @@ public void testSimpleKeyed() { assertTableEquals(fa, ex2a); assertTableEquals(fb, ex2b); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(a, i(20, 21), longCol("ID", 5, 5), intCol("Sentinel", 111, 112), col("Key", "c", "c")); a.notifyListeners(i(20, 21), i(), i()); }); @@ -281,7 +288,7 @@ public void testSimpleKeyed() { System.out.println("A before modfications."); showWithRowSet(a, 30); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(a, i(20, 21), longCol("ID", 5, 5), intCol("Sentinel", 113, 114), col("Key", "c", "c")); a.notifyListeners(i(), i(), i(20, 21)); }); @@ -301,7 +308,8 @@ public void testErrorPropagation() { final SyncTableFilter.Builder builder = new SyncTableFilter.Builder(); builder.addTable("a", a, "ID", "Key"); builder.addTable("b", b, "Ego", "Klyuch"); - final Map result = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(builder::build); + final Map result = + ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(builder::build); final Table fa = result.get("a"); final Table fb = result.get("b"); @@ -312,11 +320,11 @@ public void testErrorPropagation() { final ErrorListener la = new ErrorListener("fa", fa); final ErrorListener lb = new ErrorListener("fb", fb); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().startCycleForUnitTests(); allowingError(() -> { a.getRowSet().writableCast().remove(1); a.notifyListeners(i(), i(1), i()); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + ExecutionContext.getContext().getUpdateGraph().cast().completeCycleForUnitTests(); }, throwables -> { TestCase.assertEquals(1, getUpdateErrors().size()); final Throwable throwable = throwables.get(0); @@ -339,7 +347,9 @@ public void testDependencies() { final SyncTableFilter.Builder builder = new SyncTableFilter.Builder(); builder.addTable("a", a, "ID"); builder.addTable("b", b, "Ego"); - final Map result = 
UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(builder::build); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Map result = + updateGraph.sharedLock().computeLocked(builder::build); final Table fa = result.get("a"); final Table fb = result.get("b"); @@ -348,38 +358,39 @@ public void testDependencies() { ((QueryTable) fb).setAttribute("NAME", "b"); - final Table fau = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> fa.update("SentinelDoubled=Sentinel*2")); - final Table fbu = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> fb.update("SentinelDoubled=Sentinel*2")); - final Table joined = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> fau.naturalJoin(fbu, "Key=Klyuch", "SB=Sentinel,SBD=SentinelDoubled")); - final Table sentSum = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> joined.update("SS=SBD+SentinelDoubled")); + final Table fau = updateGraph.sharedLock().computeLocked( + () -> fa.update("SentinelDoubled=Sentinel*2")); + final Table fbu = updateGraph.sharedLock().computeLocked( + () -> fb.update("SentinelDoubled=Sentinel*2")); + final Table joined = updateGraph.sharedLock().computeLocked( + () -> fau.naturalJoin(fbu, "Key=Klyuch", "SB=Sentinel,SBD=SentinelDoubled")); + final Table sentSum = updateGraph.sharedLock().computeLocked( + () -> joined.update("SS=SBD+SentinelDoubled")); showWithRowSet(sentSum); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); - assertTrue(sentSum.satisfied(LogicalClock.DEFAULT.currentStep())); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); + assertTrue(sentSum.satisfied(updateGraph.clock().currentStep())); + updateGraph.completeCycleForUnitTests(); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); addToTable(a, i(1), longCol("ID", 1), intCol("Sentinel", 102), col("Key", "b")); 
a.notifyListeners(i(1), i(), i()); - assertFalse(fa.satisfied(LogicalClock.DEFAULT.currentStep())); - assertFalse(fb.satisfied(LogicalClock.DEFAULT.currentStep())); - assertFalse(sentSum.satisfied(LogicalClock.DEFAULT.currentStep())); + assertFalse(fa.satisfied(updateGraph.clock().currentStep())); + assertFalse(fb.satisfied(updateGraph.clock().currentStep())); + assertFalse(sentSum.satisfied(updateGraph.clock().currentStep())); - while (!fa.satisfied(LogicalClock.DEFAULT.currentStep())) { - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + while (!fa.satisfied(updateGraph.clock().currentStep())) { + updateGraph + .flushOneNotificationForUnitTests(); } - assertTrue(fa.satisfied(LogicalClock.DEFAULT.currentStep())); - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); - assertTrue(fb.satisfied(LogicalClock.DEFAULT.currentStep())); + assertTrue(fa.satisfied(updateGraph.clock().currentStep())); + updateGraph.flushOneNotificationForUnitTests(); + assertTrue(fb.satisfied(updateGraph.clock().currentStep())); - assertFalse(joined.satisfied(LogicalClock.DEFAULT.currentStep())); + assertFalse(joined.satisfied(updateGraph.clock().currentStep())); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); showWithRowSet(sentSum); int[] actual = (int[]) DataAccessHelpers.getColumn(sentSum, "SS").getDirect(); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTailInitializationFilter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTailInitializationFilter.java index 7dacfdad61a..6d66861589e 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTailInitializationFilter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTailInitializationFilter.java @@ -3,13 +3,14 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import 
io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.Table; -import io.deephaven.engine.testutil.sources.InstantTestSource; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.sources.InstantTestSource; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.table.impl.QueryTable; @@ -42,11 +43,12 @@ public void testSimple() { final Table slice0_100_filtered = input.slice(0, 100).where("Timestamp >= '" + threshold1 + "'"); final Table slice100_200_filtered = input.slice(100, 200).where("Timestamp >= '" + threshold2 + "'"); - final Table expected = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.merge(slice0_100_filtered, slice100_200_filtered)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Table expected = updateGraph.sharedLock().computeLocked( + () -> TableTools.merge(slice0_100_filtered, slice100_200_filtered)); assertTableEquals(filtered, expected); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final Instant[] data2 = new Instant[4]; data2[0] = DateTimeUtils.parseInstant("2020-08-20T06:00:00 NY"); data2[1] = DateTimeUtils.parseInstant("2020-08-20T06:30:00 NY"); @@ -61,7 +63,7 @@ public void testSimple() { final Table slice100_102 = input.slice(100, 102); final Table slice102_202_filtered = input.slice(102, 202).where("Timestamp >= '" + threshold2 + "'"); final Table slice202_204 = input.slice(202, 204); - final Table expected2 = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked( + final Table expected2 = updateGraph.sharedLock().computeLocked( () -> 
TableTools.merge(slice0_100_filtered, slice100_102, slice102_202_filtered, slice202_204)); assertTableEquals(filtered, expected2); } diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTimeSeriesFilter.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTimeSeriesFilter.java index 199faa5995f..9b5105b8f6b 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTimeSeriesFilter.java +++ b/engine/table/src/test/java/io/deephaven/engine/table/impl/util/TestTimeSeriesFilter.java @@ -3,11 +3,12 @@ */ package io.deephaven.engine.table.impl.util; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.ColumnInfo; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.generator.DateGenerator; import io.deephaven.engine.testutil.generator.IntGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.EvalNugget; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; @@ -45,7 +46,8 @@ public void testSimple() { TableTools.show(filtered); assertEquals(10, filtered.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { timeSeriesFilter.incrementNow(5000); timeSeriesFilter.run(); }); @@ -53,7 +55,7 @@ public void testSimple() { TableTools.show(filtered); assertEquals(10, filtered.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { timeSeriesFilter.incrementNow(5000); timeSeriesFilter.run(); }); @@ -61,7 +63,7 @@ public void testSimple() { TableTools.show(filtered); assertEquals(5, filtered.size()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + 
updateGraph.runWithinUnitTestCycle(() -> { timeSeriesFilter.incrementNow(2000); timeSeriesFilter.run(); }); @@ -87,18 +89,16 @@ public void testIncremental() throws ParseException { new UnitTestTimeSeriesFilter(startDate.getTime(), "Date", "PT01:00:00"); final ArrayList> filtersToRefresh = new ArrayList<>(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); EvalNugget[] en = new EvalNugget[] { - new EvalNugget() { - public Table e() { - UnitTestTimeSeriesFilter unitTestTimeSeriesFilter1 = - new UnitTestTimeSeriesFilter(unitTestTimeSeriesFilter); - filtersToRefresh.add(new WeakReference<>(unitTestTimeSeriesFilter1)); - return UpdateGraphProcessor.DEFAULT.exclusiveLock() - .computeLocked(() -> table - .update("Date=DateTimeUtils.epochNanosToInstant(Date.getTime() * 1000000L)") - .where(unitTestTimeSeriesFilter1)); - } - }, + EvalNugget.from(() -> { + UnitTestTimeSeriesFilter unitTestTimeSeriesFilter1 = + new UnitTestTimeSeriesFilter(unitTestTimeSeriesFilter); + filtersToRefresh.add(new WeakReference<>(unitTestTimeSeriesFilter1)); + return updateGraph.exclusiveLock().computeLocked( + () -> table.update("Date=DateTimeUtils.epochNanosToInstant(Date.getTime() * 1000000L)") + .where(unitTestTimeSeriesFilter1)); + }), }; @@ -107,7 +107,7 @@ public Table e() { if (ii % (updatesPerTick + 1) > 0) { simulateShiftAwareStep(size, random, table, columnInfo, en); } else { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { unitTestTimeSeriesFilter.incrementNow(3600 * 1000); final ArrayList> collectedRefs = new ArrayList<>(); diff --git a/engine/table/src/test/java/io/deephaven/engine/table/impl/verify/TestTableAssertions.java b/engine/table/src/test/java/io/deephaven/engine/table/impl/verify/TestTableAssertions.java index 60379c48a5a..1691ab308bb 100644 --- a/engine/table/src/test/java/io/deephaven/engine/table/impl/verify/TestTableAssertions.java +++ 
b/engine/table/src/test/java/io/deephaven/engine/table/impl/verify/TestTableAssertions.java @@ -3,17 +3,14 @@ */ package io.deephaven.engine.table.impl.verify; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.SortedColumnsAttribute; import io.deephaven.engine.table.impl.SortingOrder; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.EvalNugget; -import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.testutil.GenerateTableUpdates; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SortedLongGenerator; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.util.QueryConstants; @@ -69,7 +66,8 @@ public void testRefreshing() { assertTableEquals(test, testPlant); assertTableEquals(test, testInt); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(test, i(11)); addToTable(test, i(11), stringCol("Plant", "Berry"), intCol("Int", 6)); test.notifyListeners(i(11), i(11), i()); @@ -78,7 +76,7 @@ public void testRefreshing() { assertTableEquals(test, testInt); assertTableEquals(test, testPlant); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(test, i(9, 13, 18), stringCol("Plant", "Aaple", "DAFODIL", "Forsythia"), intCol("Int", 10, 4, 0)); TableTools.showWithRowSet(test); @@ -110,7 +108,7 @@ public void testIncrementalRandom(int seed, int size) { // final Random random1 = new Random(0); // QueryScope.addParam("random1", random); - 
// final Table badTable = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> + // final Table badTable = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> // table.update("RV=random1.nextDouble() < 0.00001 ? -1L : SortValue")); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { @@ -135,10 +133,10 @@ protected Table e() { }, }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < maxSteps; step++) { - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, size, random, table, columnInfo)); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, size, random, table, columnInfo)); validate(en); } // } finally { diff --git a/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java b/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java index 8c5a5a1bace..fea7cee27b1 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/TestCompileSimpleFunction.java @@ -3,24 +3,10 @@ */ package io.deephaven.engine.util; -import io.deephaven.engine.context.TestExecutionContext; -import io.deephaven.util.SafeCloseable; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import junit.framework.TestCase; -public class TestCompileSimpleFunction extends TestCase { - private SafeCloseable executionContext; - - @Override - protected void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } +public class TestCompileSimpleFunction extends 
RefreshingTableTestCase { public void testString() { String res = DynamicCompileUtils.compileSimpleFunction(String.class, "return \"Hello, world\"").get(); diff --git a/engine/table/src/test/java/io/deephaven/engine/util/TestJobScheduler.java b/engine/table/src/test/java/io/deephaven/engine/util/TestJobScheduler.java index 7b6727a9de0..a43de11a544 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/TestJobScheduler.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/TestJobScheduler.java @@ -6,8 +6,11 @@ import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.exceptions.CancellationException; import io.deephaven.engine.table.impl.util.JobScheduler; -import io.deephaven.engine.table.impl.util.UpdateGraphProcessorJobScheduler; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.table.impl.util.UpdateGraphJobScheduler; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.testutil.junit4.EngineCleanup; +import io.deephaven.engine.updategraph.UpdateGraph; +import org.junit.Rule; import org.junit.Test; import java.util.concurrent.CompletableFuture; @@ -16,16 +19,20 @@ public final class TestJobScheduler { + @Rule + public final EngineCleanup cleanup = new EngineCleanup(); + @Test public void testParallel() { final CompletableFuture waitForResult = new CompletableFuture<>(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the set for the first 50 + updateGraph.runWithinUnitTestCycle(() -> { final boolean[] completed = new boolean[100]; - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new 
UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( ExecutionContext.getContext(), null, @@ -65,12 +72,13 @@ public void testParallel() { public void testParallelWithResume() { final CompletableFuture waitForResult = new CompletableFuture<>(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the set for the first 50 + updateGraph.runWithinUnitTestCycle(() -> { final boolean[] completed = new boolean[100]; - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( ExecutionContext.getContext(), null, @@ -114,9 +122,12 @@ public void testParallelWithContext() { final CompletableFuture waitForResult = new CompletableFuture<>(); final AtomicInteger openCount = new AtomicInteger(0); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the type is correct + // verify true for the first 50 + // verify false for the next 50 + updateGraph.runWithinUnitTestCycle(() -> { class TestJobThreadContext implements JobScheduler.JobThreadContext { TestJobThreadContext() { @@ -131,7 +142,7 @@ public void close() { final boolean[] completed = new boolean[100]; - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( 
ExecutionContext.getContext(), null, @@ -180,12 +191,14 @@ public void close() { public void testSerialWithResume() { final CompletableFuture waitForResult = new CompletableFuture<>(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify true for the first 50 + // verify false for the next 50 + updateGraph.runWithinUnitTestCycle(() -> { final boolean[] completed = new boolean[100]; - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateSerial( ExecutionContext.getContext(), null, @@ -230,9 +243,12 @@ public void testSerialWithContext() { final CompletableFuture waitForResult = new CompletableFuture<>(); final AtomicInteger openCount = new AtomicInteger(0); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the type is correct + // verify true for the first 50 + // verify false for the next 50 + updateGraph.runWithinUnitTestCycle(() -> { class TestJobThreadContext implements JobScheduler.JobThreadContext { TestJobThreadContext() { @@ -247,7 +263,7 @@ public void close() { final boolean[] completed = new boolean[100]; - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateSerial( ExecutionContext.getContext(), null, @@ -296,11 +312,12 @@ public 
void close() { public void testSerialEmpty() { final CompletableFuture waitForResult = new CompletableFuture<>(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // nop + updateGraph.runWithinUnitTestCycle(() -> { - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateSerial( ExecutionContext.getContext(), null, @@ -333,11 +350,13 @@ public void testSerialEmpty() { public void testParallelEmpty() { final CompletableFuture waitForResult = new CompletableFuture<>(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // nop + UpdateGraph updateGraph1 = ExecutionContext.getContext().getUpdateGraph(); + updateGraph1.cast().runWithinUnitTestCycle(() -> { - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( ExecutionContext.getContext(), null, @@ -371,9 +390,12 @@ public void testParallelError() { final CompletableFuture waitForResult = new CompletableFuture<>(); final AtomicInteger openCount = new AtomicInteger(0); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = 
ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the type is correct + // throw before "doing work" to make verification easy + // if this is called, we failed the test + updateGraph.runWithinUnitTestCycle(() -> { final boolean[] completed = new boolean[50]; class TestJobThreadContext implements JobScheduler.JobThreadContext { @@ -387,7 +409,7 @@ public void close() { } } - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( ExecutionContext.getContext(), null, @@ -442,9 +464,13 @@ public void testSerialError() { final CompletableFuture waitForResult = new CompletableFuture<>(); final AtomicInteger openCount = new AtomicInteger(0); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the type is correct + // throw after this is set to make verification easy + // if this is called, we failed the test + // assert that the job was terminated before all tasks were executed (one is still false) + updateGraph.runWithinUnitTestCycle(() -> { final boolean[] completed = new boolean[100]; class TestJobThreadContext implements JobScheduler.JobThreadContext { @@ -458,7 +484,7 @@ public void close() { } } - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateSerial( ExecutionContext.getContext(), null, @@ -521,9 +547,12 @@ public void testNestedParallelError() { final CompletableFuture waitForResult = new CompletableFuture<>(); final AtomicInteger openCount = new 
AtomicInteger(0); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the type is correct + // throw before "doing work" to make verification easy + // if this is called, we failed the test + updateGraph.runWithinUnitTestCycle(() -> { final boolean[][] completed = new boolean[50][60]; class TestJobThreadContext implements JobScheduler.JobThreadContext { @@ -537,30 +566,32 @@ public void close() { } } - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( ExecutionContext.getContext(), null, TestJobThreadContext::new, 0, 50, - (context1, idx1, nec1, r1) -> scheduler.iterateParallel( - ExecutionContext.getContext(), - null, - TestJobThreadContext::new, - 0, - 60, - (context2, idx2, nec2) -> { - // verify the type is correct - Assert.instanceOf(context2, "context2", TestJobThreadContext.class); - - // throw before "doing work" to make verification easy - if (idx1 == 10 && idx2 == 10) { - throw new IndexOutOfBoundsException("Test error"); - } + (context1, idx1, nec1, r1) -> { + scheduler.iterateParallel( + ExecutionContext.getContext(), + null, + TestJobThreadContext::new, + 0, + 60, + (context2, idx2, nec2) -> { + // verify the type is correct + Assert.instanceOf(context2, "context2", TestJobThreadContext.class); - completed[idx1][idx2] = true; - }, r1, nec1), + // throw before "doing work" to make verification easy + if (idx1 == 10 && idx2 == 10) { + throw new IndexOutOfBoundsException("Test error"); + } + + completed[idx1][idx2] = true; + }, r1, nec1); + }, () -> { // if this is called, we failed the test waitForResult.completeExceptionally(new 
AssertionFailure("Exception not thrown")); @@ -598,9 +629,11 @@ public void testNestedParallelChainedError() { final CompletableFuture waitForResult = new CompletableFuture<>(); final AtomicInteger openCount = new AtomicInteger(0); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the type is correct + // if this is called, we failed the test + updateGraph.runWithinUnitTestCycle(() -> { final boolean[][] completed = new boolean[50][60]; class TestJobThreadContext implements JobScheduler.JobThreadContext { @@ -614,7 +647,7 @@ public void close() { } } - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( ExecutionContext.getContext(), null, @@ -674,9 +707,10 @@ public void testNestedParallelChainedOnCompleteError() { final CompletableFuture waitForResult = new CompletableFuture<>(); final AtomicInteger openCount = new AtomicInteger(0); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.resetForUnitTests(false, true, 0, 4, 10, 5); + // verify the type is correct + updateGraph.runWithinUnitTestCycle(() -> { final boolean[][] completed = new boolean[50][60]; class TestJobThreadContext implements JobScheduler.JobThreadContext { @@ -690,24 +724,26 @@ public void close() { } } - final JobScheduler scheduler = new UpdateGraphProcessorJobScheduler(); + final JobScheduler scheduler = new 
UpdateGraphJobScheduler(updateGraph); scheduler.iterateParallel( ExecutionContext.getContext(), null, TestJobThreadContext::new, 0, 50, - (context1, idx1, nec1, r1) -> scheduler.iterateParallel( - ExecutionContext.getContext(), - null, - TestJobThreadContext::new, - 0, - 60, - (context2, idx2, nec2) -> { - // verify the type is correct - Assert.instanceOf(context2, "context2", TestJobThreadContext.class); - completed[idx1][idx2] = true; - }, r1, nec1), + (context1, idx1, nec1, r1) -> { + scheduler.iterateParallel( + ExecutionContext.getContext(), + null, + TestJobThreadContext::new, + 0, + 60, + (context2, idx2, nec2) -> { + // verify the type is correct + Assert.instanceOf(context2, "context2", TestJobThreadContext.class); + completed[idx1][idx2] = true; + }, r1, nec1); + }, () -> { throw new IllegalStateException("Intentional completion failure"); }, diff --git a/engine/table/src/test/java/io/deephaven/engine/util/TestTableTools.java b/engine/table/src/test/java/io/deephaven/engine/util/TestTableTools.java index 8f78c986bff..adfcd00bd79 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/TestTableTools.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/TestTableTools.java @@ -5,12 +5,8 @@ import io.deephaven.chunk.IntChunk; import io.deephaven.chunk.attributes.Values; -import io.deephaven.configuration.Configuration; import io.deephaven.datastructures.util.CollectionUtil; -import io.deephaven.engine.context.QueryCompiler; -import io.deephaven.engine.context.TestExecutionContext; -import io.deephaven.engine.liveness.LivenessScope; -import io.deephaven.engine.liveness.LivenessScopeStack; +import io.deephaven.engine.context.*; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.RowSetShiftData; @@ -20,27 +16,24 @@ import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.TableUpdateImpl; import 
io.deephaven.engine.table.impl.UpdateErrorReporter; -import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; import io.deephaven.engine.table.impl.sources.UnionRedirection; -import io.deephaven.engine.table.impl.util.AsyncClientErrorNotifier; import io.deephaven.engine.table.impl.util.ColumnHolder; import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.DoubleGenerator; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.SortedIntGenerator; import io.deephaven.engine.testutil.generator.StringGenerator; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.testutil.rowset.RowSetTstUtils; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.LogicalClockImpl; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; import io.deephaven.util.ExceptionDetails; import io.deephaven.util.QueryConstants; -import io.deephaven.util.SafeCloseable; import junit.framework.TestCase; -import org.junit.After; import org.junit.Assert; import org.junit.Before; +import org.junit.Rule; import org.junit.experimental.categories.Category; import java.time.Instant; @@ -59,16 +52,8 @@ @Category(OutOfBandTest.class) public class TestTableTools extends TestCase implements UpdateErrorReporter { - private static final boolean ENABLE_QUERY_COMPILER_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(TestTableTools.class, "QueryCompiler.logEnabled", false); - - private UpdateErrorReporter oldReporter; - - private boolean oldCheckUgp; - private boolean oldLogEnabled; - - private LivenessScope scope; - private SafeCloseable executionContext; + @Rule + public final EngineCleanup framework = new EngineCleanup(); private Table table1; private Table table2; @@ -78,18 +63,6 @@ public class TestTableTools extends TestCase 
implements UpdateErrorReporter { public void setUp() throws Exception { super.setUp(); - oldCheckUgp = UpdateGraphProcessor.DEFAULT.setCheckTableOperations(false); - oldLogEnabled = QueryCompiler.setLogEnabled(ENABLE_QUERY_COMPILER_LOGGING); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - UpdatePerformanceTracker.getInstance().enableUnitTestMode(); - - scope = new LivenessScope(); - executionContext = TestExecutionContext.createForUnitTests().open(); - LivenessScopeStack.push(scope); - - oldReporter = AsyncClientErrorNotifier.setReporter(this); - table1 = testRefreshingTable(TstUtils.i(2, 3, 6, 7, 8, 10, 12, 15, 16).toTracking(), col("StringKeys", "key1", "key1", "key1", "key1", "key2", "key2", "key2", "key2", "key2"), col("GroupedInts", 1, 1, 2, 2, 2, 3, 3, 3, 3)); @@ -101,19 +74,6 @@ public void setUp() throws Exception { } - @After - public void tearDown() throws Exception { - super.tearDown(); - - LivenessScopeStack.pop(scope); - scope.release(); - executionContext.close(); - QueryCompiler.setLogEnabled(oldLogEnabled); - UpdateGraphProcessor.DEFAULT.setCheckTableOperations(oldCheckUgp); - AsyncClientErrorNotifier.setReporter(oldReporter); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } - @Override public void reportUpdateError(Throwable t) { System.err.println("Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); @@ -465,29 +425,30 @@ protected Table e() { .update("A=1")) }; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int i = 0; i < 20; i++) { System.out.println("Step = " + i); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> addRows(random, table1)); + updateGraph.runWithinUnitTestCycle(() -> addRows(random, table1)); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> addRows(random, table2)); + updateGraph.runWithinUnitTestCycle(() -> addRows(random, 
table2)); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> addRows(random, table3)); + updateGraph.runWithinUnitTestCycle(() -> addRows(random, table3)); TstUtils.validate(en); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> addRows(random, table1)); + updateGraph.runWithinUnitTestCycle(() -> addRows(random, table1)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> addRows(random, table2)); + updateGraph.runWithinUnitTestCycle(() -> addRows(random, table2)); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> addRows(random, table3)); + updateGraph.runWithinUnitTestCycle(() -> addRows(random, table3)); TstUtils.validate(en); } } public void testMergeIterative2() { - LogicalClock clock = LogicalClock.DEFAULT; + LogicalClockImpl clock = (LogicalClockImpl) ExecutionContext.getContext().getUpdateGraph().clock(); Random random = new Random(0); ColumnInfo[] info1; @@ -557,8 +518,9 @@ protected Table e() { boolean mod2 = random.nextBoolean(); boolean mod3 = random.nextBoolean(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); if (mod1) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + updateGraph.runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(size, random, table1, info1)); } else { clock.startUpdateCycle(); @@ -566,7 +528,7 @@ protected Table e() { } if (mod2) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + updateGraph.runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(size, random, table2, info2)); } else { clock.startUpdateCycle(); @@ -574,7 +536,7 @@ protected Table e() { } if (mod3) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( + updateGraph.runWithinUnitTestCycle( () -> GenerateTableUpdates.generateTableUpdates(size, random, table3, info3)); } else { clock.startUpdateCycle(); @@ -660,7 +622,8 @@ public void testMergeWithNestedShift() { for (int ii = 1; ii < 10; ++ii) { final 
int fii = PRIME * ii; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(fii), col("Sentinel", fii)); table.notifyListeners(i(fii), i(), i()); }); @@ -678,7 +641,8 @@ public void testMergeWithShiftBoundary() { showWithRowSet(result); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(ONE_MILLION - 11), col("Sentinel", 1)); removeRows(table, i(ONE_MILLION - 1)); final TableUpdateImpl update = new TableUpdateImpl(); @@ -712,9 +676,10 @@ public void testMergeShiftsEmptyTable() { final int PRIME = 61409; Assert.assertTrue(2 * PRIME > UnionRedirection.ALLOCATION_UNIT_ROW_KEYS); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 1; ii < 10; ++ii) { final int fii = 2 * PRIME * ii + 1; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final long currKey = table.getRowSet().lastRowKey(); removeRows(table, i(currKey)); addToTable(table, i(fii), col("Sentinel", 1)); @@ -746,7 +711,8 @@ public void testMergeShiftBoundary() { final Table expected = TableTools.newTable(intCol("Sentinel", 1, 3)); assertTableEquals(expected, m2); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { removeRows(table1, i(65538)); addToTable(table1, i(65537), col("Sentinel", 2)); @@ -786,9 +752,11 @@ public void testMergeDeepShifts() { final int SHIFT_SIZE = 4 * 61409; Assert.assertTrue(SHIFT_SIZE > UnionRedirection.ALLOCATION_UNIT_ROW_KEYS); + final ControlledUpdateGraph updateGraph = 
ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 1; ii < 10; ++ii) { final int fii = SHIFT_SIZE * ii + 1; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // Manually apply shift. + updateGraph.runWithinUnitTestCycle(() -> { final long currKey = table.getRowSet().lastRowKey(); // Manually apply shift. removeRows(table, i(currKey)); @@ -910,9 +878,10 @@ public void onUpdate(final TableUpdate upstream) { protected void onFailureInternal(Throwable originalException, Entry sourceEntry) {} }); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int ii = 1; ii < 100; ++ii) { final int fii = PRIME * ii; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(table, i(fii), col("Sentinel", fii)); table.notifyListeners(i(fii), i(), i()); }); @@ -982,11 +951,12 @@ public void testMergeIndexShiftingPerformance() { final long start = System.currentTimeMillis(); long stepStart = start; + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); for (int step = 0; step < 150; ++step) { final int stepSize = 20; final int firstNextIdx = (step * stepSize) + 1; final int lastNextIdx = ((step + 1) * stepSize); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final RowSet addRowSet = RowSetFactory.fromRange(firstNextIdx, lastNextIdx); final int[] addInts = new int[stepSize]; diff --git a/engine/table/src/test/java/io/deephaven/engine/util/TestToMapListener.java b/engine/table/src/test/java/io/deephaven/engine/util/TestToMapListener.java index defa7bdf848..2ef81d57d13 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/TestToMapListener.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/TestToMapListener.java @@ -3,10 +3,11 @@ */ package io.deephaven.engine.util; +import io.deephaven.engine.context.ExecutionContext; 
import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import static io.deephaven.engine.testutil.TstUtils.*; import static io.deephaven.engine.util.TableTools.col; @@ -32,7 +33,8 @@ public void testToMap() { assertEquals("K", tml.get("D")); assertNull(tml.get("E")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { addToTable(source, i(10), col("Sentinel", "E"), col("Sentinel2", "L")); source.notifyListeners(i(10), i(), i()); @@ -49,7 +51,7 @@ public void testToMap() { assertEquals("K", tml.get("D")); assertEquals("L", tml.get("E")); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { addToTable(source, i(10), col("Sentinel", "E"), col("Sentinel2", "M")); removeRows(source, i(2)); source.notifyListeners(i(), i(2), i(10)); diff --git a/engine/table/src/test/java/io/deephaven/engine/util/TestWindowCheck.java b/engine/table/src/test/java/io/deephaven/engine/util/TestWindowCheck.java index 6657ad93075..26d26b477d3 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/TestWindowCheck.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/TestWindowCheck.java @@ -4,6 +4,7 @@ package io.deephaven.engine.util; import io.deephaven.base.Pair; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.Table; @@ -12,15 +13,10 @@ import io.deephaven.engine.table.impl.InstrumentedTableUpdateListener; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.TableUpdateValidator; 
-import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.testutil.GenerateTableUpdates; -import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.IntGenerator; import io.deephaven.engine.testutil.generator.UnsortedInstantGenerator; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; @@ -89,13 +85,14 @@ private void testWindowCheckIterative(int seed) { clock.now = DateTimeUtils.epochNanos(startTime); final WindowEvalNugget[] en; - UpdateGraphProcessor.DEFAULT.exclusiveLock().lock(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.exclusiveLock().lock(); try { en = new WindowEvalNugget[] { new WindowEvalNugget(clock, table) }; } finally { - UpdateGraphProcessor.DEFAULT.exclusiveLock().unlock(); + updateGraph.exclusiveLock().unlock(); } final int stepsPerTick = 1; @@ -107,14 +104,14 @@ private void testWindowCheckIterative(int seed) { final boolean combined = combinedRandom.nextBoolean(); if (combined) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { advanceTime(clock, en); GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, size, random, table, columnInfo); }); TstUtils.validate("Step " + step, en); } else { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> advanceTime(clock, en)); + updateGraph.runWithinUnitTestCycle(() -> advanceTime(clock, en)); if (RefreshingTableTestCase.printTableUpdates) { TstUtils.validate("Step = " + step + " time = " + 
DateTimeUtils.epochNanosToInstant(clock.now), en); } @@ -152,13 +149,14 @@ public void testWindowCheckEmptyInitial() { final QueryTable tableToCheck = testRefreshingTable(i().toTracking(), col("Timestamp", emptyInstantArray), intCol("Sentinel")); - final Pair windowed = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> WindowCheck.addTimeWindowInternal(clock, tableToCheck, "Timestamp", + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Pair windowed = updateGraph.sharedLock().computeLocked( + () -> WindowCheck.addTimeWindowInternal(clock, tableToCheck, "Timestamp", DateTimeUtils.SECOND * 60, "InWindow", false)); TableTools.showWithRowSet(windowed.first); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(windowed.second::run); + updateGraph.runWithinUnitTestCycle(windowed.second::run); } @@ -175,13 +173,14 @@ public void testWindowCheckGetPrev() { col("Timestamp", initialValues), intCol("Sentinel", 1, 2, 3, 4)); - final Pair windowed = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> WindowCheck.addTimeWindowInternal( + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Pair windowed = updateGraph.sharedLock().computeLocked( + () -> WindowCheck.addTimeWindowInternal( timeProvider, tableToCheck, "Timestamp", DateTimeUtils.SECOND * 60, "InWindow", false)); TableTools.showWithRowSet(windowed.first); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(windowed.second::run); + updateGraph.runWithinUnitTestCycle(windowed.second::run); assertTableEquals(tableToCheck.updateView("InWindow = Sentinel == 4 ? 
null : Sentinel >= 2"), windowed.first); @@ -195,13 +194,13 @@ public void testWindowCheckGetPrev() { Assert.assertEquals(resultSource.getPrev(2), Boolean.TRUE); Assert.assertNull(resultSource.getPrev(3)); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); timeProvider.now = DateTimeUtils.epochNanos(DateTimeUtils.parseInstant("2022-07-14T09:34:00 NY")); windowed.second.run(); - while (((QueryTable) windowed.first).getLastNotificationStep() < LogicalClock.DEFAULT.currentStep()) { - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + while (((QueryTable) windowed.first).getLastNotificationStep() < updateGraph.clock().currentStep()) { + updateGraph.flushOneNotificationForUnitTests(); } Assert.assertEquals(resultSource.get(0), Boolean.FALSE); @@ -213,7 +212,7 @@ public void testWindowCheckGetPrev() { Assert.assertEquals(resultSource.getPrev(2), Boolean.TRUE); Assert.assertNull(resultSource.getPrev(3)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); } @Test @@ -231,13 +230,14 @@ public void testWindowCheckStatic() { intCol("Sentinel", 1, 2, 3, 4)); Assert.assertFalse(tableToCheck.isRefreshing()); - final Pair windowed = - UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> WindowCheck.addTimeWindowInternal( + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + final Pair windowed = updateGraph.sharedLock().computeLocked( + () -> WindowCheck.addTimeWindowInternal( timeProvider, tableToCheck, "Timestamp", DateTimeUtils.SECOND * 60, "InWindow", false)); TableTools.showWithRowSet(windowed.first); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(windowed.second::run); + updateGraph.runWithinUnitTestCycle(windowed.second::run); assertTableEquals(tableToCheck.updateView("InWindow = Sentinel == 4 ? 
null : Sentinel >= 2"), windowed.first); @@ -251,13 +251,13 @@ public void testWindowCheckStatic() { Assert.assertEquals(resultSource.getPrev(2), Boolean.TRUE); Assert.assertNull(resultSource.getPrev(3)); - UpdateGraphProcessor.DEFAULT.startCycleForUnitTests(); + updateGraph.startCycleForUnitTests(); timeProvider.now = DateTimeUtils.epochNanos(DateTimeUtils.parseInstant("2022-07-14T09:34:00 NY")); windowed.second.run(); - while (((QueryTable) windowed.first).getLastNotificationStep() < LogicalClock.DEFAULT.currentStep()) { - UpdateGraphProcessor.DEFAULT.flushOneNotificationForUnitTests(); + while (((QueryTable) windowed.first).getLastNotificationStep() < updateGraph.clock().currentStep()) { + updateGraph.flushOneNotificationForUnitTests(); } Assert.assertEquals(resultSource.get(0), Boolean.FALSE); @@ -269,7 +269,7 @@ public void testWindowCheckStatic() { Assert.assertEquals(resultSource.getPrev(2), Boolean.TRUE); Assert.assertNull(resultSource.getPrev(3)); - UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests(); + updateGraph.completeCycleForUnitTests(); } @Test diff --git a/engine/table/src/test/java/io/deephaven/engine/util/TimeTableTest.java b/engine/table/src/test/java/io/deephaven/engine/util/TimeTableTest.java index c15bb4635c7..8d79f13d3b0 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/TimeTableTest.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/TimeTableTest.java @@ -7,17 +7,18 @@ import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Any; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.engine.table.ChunkSource; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.impl.QueryTable; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import 
io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.table.impl.TableUpdateValidator; import io.deephaven.engine.table.impl.TimeTable; import io.deephaven.engine.table.impl.sources.FillUnordered; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.UpdateSourceCombiner; import io.deephaven.time.DateTimeUtils; import org.junit.Assert; @@ -40,7 +41,7 @@ public void setUp() throws Exception { super.setUp(); clock = new TestClock(0); - updateSourceCombiner = new UpdateSourceCombiner(); + updateSourceCombiner = new UpdateSourceCombiner(ExecutionContext.getContext().getUpdateGraph()); } @Override @@ -67,7 +68,8 @@ private void build(TimeTable.Builder builder) { private void tick(long tm) { clock.now = tm; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); validator.validate(); } diff --git a/engine/table/src/test/java/io/deephaven/engine/util/scripts/TestGroovyDeephavenSession.java b/engine/table/src/test/java/io/deephaven/engine/util/scripts/TestGroovyDeephavenSession.java index cf491bfd60b..342ef39932a 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/scripts/TestGroovyDeephavenSession.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/scripts/TestGroovyDeephavenSession.java @@ -3,23 +3,25 @@ */ package io.deephaven.engine.util.scripts; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.GroovyDeephavenSession; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.util.ScriptSession; import 
io.deephaven.plugin.type.ObjectTypeLookup.NoOp; import org.apache.commons.lang3.mutable.MutableInt; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.*; import java.io.IOException; public class TestGroovyDeephavenSession { + @Rule + public final EngineCleanup framework = new EngineCleanup(); + private LivenessScope livenessScope; private GroovyDeephavenSession session; @@ -27,7 +29,9 @@ public class TestGroovyDeephavenSession { public void setup() throws IOException { livenessScope = new LivenessScope(); LivenessScopeStack.push(livenessScope); - session = new GroovyDeephavenSession(NoOp.INSTANCE, null, GroovyDeephavenSession.RunScripts.none()); + session = new GroovyDeephavenSession( + ExecutionContext.getContext().getUpdateGraph(), NoOp.INSTANCE, null, + GroovyDeephavenSession.RunScripts.none()); } @After diff --git a/engine/table/src/test/java/io/deephaven/engine/util/systemicmarking/TestSystemicObjectMarking.java b/engine/table/src/test/java/io/deephaven/engine/util/systemicmarking/TestSystemicObjectMarking.java index 5a6d2fc6bda..2748f0d7612 100644 --- a/engine/table/src/test/java/io/deephaven/engine/util/systemicmarking/TestSystemicObjectMarking.java +++ b/engine/table/src/test/java/io/deephaven/engine/util/systemicmarking/TestSystemicObjectMarking.java @@ -3,13 +3,15 @@ */ package io.deephaven.engine.util.systemicmarking; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.ErrorListener; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.select.FormulaEvaluationException; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import 
io.deephaven.engine.util.TableTools; import junit.framework.TestCase; @@ -21,12 +23,18 @@ public class TestSystemicObjectMarking extends RefreshingTableTestCase { public void testSystemicObjectMarking() { final QueryTable source = TstUtils.testRefreshingTable(col("Str", "a", "b"), col("Str2", "A", "B")); - final Table updated = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> source.update("UC=Str.toUpperCase()")); - final Table updated2 = SystemicObjectTracker.executeSystemically(false, () -> UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> source.update("LC=Str2.toLowerCase()"))); + final Table updated = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> source.update("UC=Str.toUpperCase()")); + final Table updated2 = SystemicObjectTracker.executeSystemically(false, + () -> { + return ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked(() -> source.update("LC=Str2.toLowerCase()")); + }); TableTools.showWithRowSet(updated); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + UpdateGraph updateGraph12 = ExecutionContext.getContext().getUpdateGraph(); + UpdateGraph updateGraph3 = updateGraph12.cast(); + updateGraph3.cast().runWithinUnitTestCycle(() -> { TstUtils.addToTable(source, i(2, 3), col("Str", "c", "d"), col("Str2", "C", "D")); source.notifyListeners(i(2, 3), i(), i()); }); @@ -37,7 +45,9 @@ public void testSystemicObjectMarking() { final ErrorListener errorListener2 = new ErrorListener(updated2); updated2.addUpdateListener(errorListener2); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + UpdateGraph updateGraph11 = ExecutionContext.getContext().getUpdateGraph(); + UpdateGraph updateGraph2 = updateGraph11.cast(); + updateGraph2.cast().runWithinUnitTestCycle(() -> { TstUtils.addToTable(source, i(4, 5), col("Str", "e", "f"), col("Str2", "E", null)); source.notifyListeners(i(4, 5), i(), i()); }); @@ -58,10 +68,12 @@ public void testSystemicObjectMarking() { 
updated.addUpdateListener(errorListener); allowingError(() -> { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(source, i(7, 8), col("Str", "g", null), col("Str2", "G", "H")); - source.notifyListeners(i(7, 8), i(), i()); - }); + UpdateGraph updateGraph1 = ExecutionContext.getContext().getUpdateGraph(); + UpdateGraph updateGraph = updateGraph1.cast(); + updateGraph.cast().runWithinUnitTestCycle(() -> { + TstUtils.addToTable(source, i(7, 8), col("Str", "g", null), col("Str2", "G", "H")); + source.notifyListeners(i(7, 8), i(), i()); + }); }, TestSystemicObjectMarking::isNpe); assertTrue(updated.isFailed()); diff --git a/engine/table/src/test/java/io/deephaven/stream/TestStreamToBlinkTableAdapter.java b/engine/table/src/test/java/io/deephaven/stream/TestStreamToBlinkTableAdapter.java index bb9f845b070..2e9fc8ce93f 100644 --- a/engine/table/src/test/java/io/deephaven/stream/TestStreamToBlinkTableAdapter.java +++ b/engine/table/src/test/java/io/deephaven/stream/TestStreamToBlinkTableAdapter.java @@ -5,13 +5,15 @@ import io.deephaven.chunk.attributes.Values; import io.deephaven.datastructures.util.CollectionUtil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.time.DateTimeUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.impl.SimpleListener; @@ -20,8 +22,7 @@ import junit.framework.TestCase; import org.apache.commons.lang3.mutable.MutableBoolean; import org.jetbrains.annotations.NotNull; -import org.junit.After; 
-import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.time.Instant; @@ -30,16 +31,9 @@ import static io.deephaven.engine.util.TableTools.*; public class TestStreamToBlinkTableAdapter { - @Before - public void setUp() throws Exception { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - @After - public void tearDown() throws Exception { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); - } + @Rule + public final EngineCleanup framework = new EngineCleanup(); @Test public void testSimple() { @@ -50,15 +44,16 @@ public void testSimple() { final StreamPublisher streamPublisher = new DummyStreamPublisher(); - final StreamToBlinkTableAdapter adapter = - new StreamToBlinkTableAdapter(tableDefinition, streamPublisher, UpdateGraphProcessor.DEFAULT, "test"); + final StreamToBlinkTableAdapter adapter = new StreamToBlinkTableAdapter( + tableDefinition, streamPublisher, ExecutionContext.getContext().getUpdateGraph(), "test"); final Table result = adapter.table(); TstUtils.assertTableEquals(empty, result); final SimpleListener listener = new SimpleListener(result); result.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); @@ -87,7 +82,7 @@ public void testSimple() { TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(1, listener.getCount()); TestCase.assertEquals(RowSetFactory.flat(2), listener.getUpdate().added()); TestCase.assertEquals(RowSetFactory.empty(), listener.getUpdate().removed()); @@ -100,7 +95,7 @@ public 
void testSimple() { TstUtils.assertTableEquals(expect1, result); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(1, listener.getCount()); @@ -111,15 +106,15 @@ public void testSimple() { TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(0, listener.getCount()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(0, listener.getCount()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(0, listener.getCount()); chunks[0] = woc = WritableObjectChunk.makeWritableChunk(2); @@ -158,7 +153,7 @@ public void testSimple() { adapter.accept(chunks); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(1, listener.getCount()); TestCase.assertEquals(RowSetFactory.flat(4), listener.getUpdate().added()); TestCase.assertEquals(RowSetFactory.empty(), listener.getUpdate().removed()); @@ -189,7 +184,7 @@ public void testSimple() { adapter.accept(chunks); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(1, listener.getCount()); TestCase.assertEquals(RowSetFactory.flat(2), listener.getUpdate().added()); TestCase.assertEquals(RowSetFactory.flat(4), listener.getUpdate().removed()); @@ -202,7 +197,7 @@ public void testSimple() { TstUtils.assertTableEquals(expect3, result); listener.reset(); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(1, listener.getCount()); TestCase.assertEquals(RowSetFactory.empty(), listener.getUpdate().added()); @@ -212,7 +207,7 @@ public void testSimple() { TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(0, listener.getCount()); TstUtils.assertTableEquals(empty, result); } @@ -226,15 +221,16 @@ public void testWrappedTypes() { final StreamPublisher streamPublisher = new DummyStreamPublisher(); - final StreamToBlinkTableAdapter adapter = - new StreamToBlinkTableAdapter(tableDefinition, streamPublisher, UpdateGraphProcessor.DEFAULT, "test"); + final StreamToBlinkTableAdapter adapter = new StreamToBlinkTableAdapter(tableDefinition, streamPublisher, + ExecutionContext.getContext().getUpdateGraph(), "test"); final Table result = adapter.table(); TstUtils.assertTableEquals(empty, result); final SimpleListener listener = new SimpleListener(result); result.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); @@ -263,7 +259,7 @@ public void testWrappedTypes() { TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(1, listener.getCount()); TestCase.assertEquals(RowSetFactory.flat(3), listener.getUpdate().added()); 
TestCase.assertEquals(RowSetFactory.empty(), listener.getUpdate().removed()); @@ -276,7 +272,7 @@ public void testWrappedTypes() { TstUtils.assertTableEquals(expect1, result); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(1, listener.getCount()); @@ -296,15 +292,16 @@ public void testArrayTypes() { final StreamPublisher streamPublisher = new DummyStreamPublisher(); - final StreamToBlinkTableAdapter adapter = - new StreamToBlinkTableAdapter(tableDefinition, streamPublisher, UpdateGraphProcessor.DEFAULT, "test"); + final StreamToBlinkTableAdapter adapter = new StreamToBlinkTableAdapter( + tableDefinition, streamPublisher, ExecutionContext.getContext().getUpdateGraph(), "test"); final Table result = adapter.table(); TstUtils.assertTableEquals(empty, result); final SimpleListener listener = new SimpleListener(result); result.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); @@ -323,7 +320,7 @@ public void testArrayTypes() { TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(1, listener.getCount()); TestCase.assertEquals(RowSetFactory.flat(2), listener.getUpdate().added()); TestCase.assertEquals(RowSetFactory.empty(), listener.getUpdate().removed()); @@ -337,7 +334,7 @@ public void testArrayTypes() { TstUtils.assertTableEquals(expect1, result); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + 
updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(1, listener.getCount()); @@ -355,15 +352,16 @@ public void testBig() { final StreamPublisher streamPublisher = new DummyStreamPublisher(); - final StreamToBlinkTableAdapter adapter = - new StreamToBlinkTableAdapter(tableDefinition, streamPublisher, UpdateGraphProcessor.DEFAULT, "test"); + final StreamToBlinkTableAdapter adapter = new StreamToBlinkTableAdapter( + tableDefinition, streamPublisher, ExecutionContext.getContext().getUpdateGraph(), "test"); final Table result = adapter.table(); TstUtils.assertTableEquals(empty, result); final SimpleListener listener = new SimpleListener(result); result.addUpdateListener(listener); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); @@ -391,7 +389,7 @@ public void testBig() { TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(0, listener.getCount()); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(1, listener.getCount()); TestCase.assertEquals(RowSetFactory.flat(4048), listener.getUpdate().added()); TestCase.assertEquals(RowSetFactory.empty(), listener.getUpdate().removed()); @@ -403,7 +401,7 @@ public void testBig() { TstUtils.assertTableEquals(expect1, result); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TstUtils.assertTableEquals(empty, result); TestCase.assertEquals(1, listener.getCount()); @@ -414,11 +412,11 @@ public void testBig() { TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet()); listener.reset(); - 
UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(0, listener.getCount()); listener.reset(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertEquals(0, listener.getCount()); } @@ -429,8 +427,8 @@ public void testError() { List.of(String.class, int.class, long.class, double.class)); final DummyStreamPublisher streamPublisher = new DummyStreamPublisher(); - final StreamToBlinkTableAdapter adapter = - new StreamToBlinkTableAdapter(tableDefinition, streamPublisher, UpdateGraphProcessor.DEFAULT, "test"); + final StreamToBlinkTableAdapter adapter = new StreamToBlinkTableAdapter( + tableDefinition, streamPublisher, ExecutionContext.getContext().getUpdateGraph(), "test"); final Table result = adapter.table(); final MutableBoolean listenerFailed = new MutableBoolean(); @@ -443,7 +441,8 @@ public void onFailureInternal(Throwable originalException, Entry sourceEntry) { result.addUpdateListener(listener); streamPublisher.fail = true; - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(adapter::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(adapter::run); TestCase.assertTrue(listenerFailed.booleanValue()); } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/context/TestExecutionContext.java b/engine/test-utils/src/main/java/io/deephaven/engine/context/TestExecutionContext.java new file mode 100644 index 00000000000..11dfd2227ff --- /dev/null +++ b/engine/test-utils/src/main/java/io/deephaven/engine/context/TestExecutionContext.java @@ -0,0 +1,23 @@ +package io.deephaven.engine.context; + +import io.deephaven.auth.AuthContext; +import io.deephaven.engine.testutil.ControlledUpdateGraph; +import io.deephaven.engine.updategraph.UpdateGraph; + +public class TestExecutionContext { + public static 
ExecutionContext createForUnitTests() { + UpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph(); + if (!(updateGraph instanceof ControlledUpdateGraph)) { + updateGraph = new ControlledUpdateGraph(); + // noinspection resource + ExecutionContext.getContext().withUpdateGraph(updateGraph).open(); + } + return new ExecutionContext.Builder(new AuthContext.SuperUser()) + .markSystemic() + .newQueryScope() + .newQueryLibrary() + .setQueryCompiler(QueryCompiler.createForUnitTests()) + .setUpdateGraph(updateGraph) + .build(); + } +} diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/context/TestExecutionContextAccess.java b/engine/test-utils/src/main/java/io/deephaven/engine/context/TestExecutionContextAccess.java deleted file mode 100644 index 94c77fa256d..00000000000 --- a/engine/test-utils/src/main/java/io/deephaven/engine/context/TestExecutionContextAccess.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.deephaven.engine.context; - -import io.deephaven.auth.AuthContext; - -public class TestExecutionContextAccess { - public static ExecutionContext createForUnitTests() { - return new ExecutionContext.Builder(new AuthContext.SuperUser()) - .markSystemic() - .newQueryScope() - .newQueryLibrary() - .setQueryCompiler(QueryCompiler.createForUnitTests()) - .build(); - } -} diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/ControlledUpdateGraph.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/ControlledUpdateGraph.java new file mode 100644 index 00000000000..620b5a7a873 --- /dev/null +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/ControlledUpdateGraph.java @@ -0,0 +1,10 @@ +package io.deephaven.engine.testutil; + +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; + +// TODO (deephaven-core#3886): Extract test functionality from PeriodicUpdateGraph +public class ControlledUpdateGraph extends PeriodicUpdateGraph { + public ControlledUpdateGraph() { + super("TEST", true, 
1000, 25, -1); + } +} diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/QueryTableTestBase.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/QueryTableTestBase.java index d2a1c3cb478..6121df0b38e 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/QueryTableTestBase.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/QueryTableTestBase.java @@ -3,13 +3,13 @@ */ package io.deephaven.engine.testutil; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.ShiftObliviousInstrumentedListenerAdapter; import io.deephaven.engine.table.impl.util.ShiftObliviousUpdateCoalescer; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; import org.apache.commons.lang3.mutable.MutableInt; @@ -118,7 +118,8 @@ public String toString() { public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, ColumnInfo[] leftColumnInfo, ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, Random random) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, leftSize, random, leftTable, leftColumnInfo); GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, rightSize, diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/junit4/EngineCleanup.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/junit4/EngineCleanup.java index 
794dc6720d4..5c405b08367 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/junit4/EngineCleanup.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/junit4/EngineCleanup.java @@ -3,10 +3,8 @@ */ package io.deephaven.engine.testutil.junit4; -import io.deephaven.engine.context.TestExecutionContextAccess; import io.deephaven.engine.testutil.QueryTableTestBase; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.util.SafeCloseable; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runners.model.Statement; @@ -17,19 +15,6 @@ * instead create a {@code @Rule public final EngineCleanup field = new EngineCleanup();}. */ public class EngineCleanup extends QueryTableTestBase implements TestRule { - private SafeCloseable executionContext; - - @Override - public void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContextAccess.createForUnitTests().open(); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } // We use this class as a field in JUnit 4 tests which should not extend TestCase. 
This method is a no-op test // method so when we are detected as a JUnit3 test, we do not fail diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ByteTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ByteTestSource.java index 055f385d60d..9bd1afaa0c5 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ByteTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ByteTestSource.java @@ -14,12 +14,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.QueryConstants; @@ -39,12 +39,13 @@ */ public class ByteTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForByte, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private long lastAdditionTime; protected final Long2ByteOpenHashMap data = new Long2ByteOpenHashMap(); protected Long2ByteOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, ByteTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, ByteTestSource::flushPrevious); // region empty constructor public ByteTestSource() { @@ -55,6 +56,7 @@ public ByteTestSource() { // region chunk constructor public ByteTestSource(RowSet rowSet, Chunk data) { super(byte.class); + lastAdditionTime = updateGraph.clock().currentStep(); 
add(rowSet, data); setDefaultReturnValue(this.data); this.prevData = this.data; @@ -113,7 +115,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/CharTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/CharTestSource.java index 0989eb3321d..aae47f4f85a 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/CharTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/CharTestSource.java @@ -9,12 +9,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.QueryConstants; @@ -34,12 +34,13 @@ */ public class CharTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForChar, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private long lastAdditionTime; protected final Long2CharOpenHashMap data = new Long2CharOpenHashMap(); protected Long2CharOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, CharTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, 
CharTestSource::flushPrevious); // region empty constructor public CharTestSource() { @@ -50,6 +51,7 @@ public CharTestSource() { // region chunk constructor public CharTestSource(RowSet rowSet, Chunk data) { super(char.class); + lastAdditionTime = updateGraph.clock().currentStep(); add(rowSet, data); setDefaultReturnValue(this.data); this.prevData = this.data; @@ -108,7 +110,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/DoubleTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/DoubleTestSource.java index d19080d01da..0307150e65e 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/DoubleTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/DoubleTestSource.java @@ -14,12 +14,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.QueryConstants; @@ -39,12 +39,13 @@ */ public class DoubleTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForDouble, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private 
long lastAdditionTime; protected final Long2DoubleOpenHashMap data = new Long2DoubleOpenHashMap(); protected Long2DoubleOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, DoubleTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, DoubleTestSource::flushPrevious); // region empty constructor public DoubleTestSource() { @@ -55,6 +56,7 @@ public DoubleTestSource() { // region chunk constructor public DoubleTestSource(RowSet rowSet, Chunk data) { super(double.class); + lastAdditionTime = updateGraph.clock().currentStep(); add(rowSet, data); setDefaultReturnValue(this.data); this.prevData = this.data; @@ -113,7 +115,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/FloatTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/FloatTestSource.java index 31baddb1ff8..9a8385e39d0 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/FloatTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/FloatTestSource.java @@ -14,12 +14,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import 
io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.QueryConstants; @@ -39,12 +39,13 @@ */ public class FloatTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForFloat, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private long lastAdditionTime; protected final Long2FloatOpenHashMap data = new Long2FloatOpenHashMap(); protected Long2FloatOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, FloatTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, FloatTestSource::flushPrevious); // region empty constructor public FloatTestSource() { @@ -55,6 +56,7 @@ public FloatTestSource() { // region chunk constructor public FloatTestSource(RowSet rowSet, Chunk data) { super(float.class); + lastAdditionTime = updateGraph.clock().currentStep(); add(rowSet, data); setDefaultReturnValue(this.data); this.prevData = this.data; @@ -113,7 +115,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/IntTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/IntTestSource.java index d50deb535ff..230b2e26600 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/IntTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/IntTestSource.java @@ -14,12 +14,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import 
io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.QueryConstants; @@ -39,12 +39,13 @@ */ public class IntTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForInt, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private long lastAdditionTime; protected final Long2IntOpenHashMap data = new Long2IntOpenHashMap(); protected Long2IntOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, IntTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, IntTestSource::flushPrevious); // region empty constructor public IntTestSource() { @@ -55,6 +56,7 @@ public IntTestSource() { // region chunk constructor public IntTestSource(RowSet rowSet, Chunk data) { super(int.class); + lastAdditionTime = updateGraph.clock().currentStep(); add(rowSet, data); setDefaultReturnValue(this.data); this.prevData = this.data; @@ -113,7 +115,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/LongTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/LongTestSource.java index 524314cfa73..5e421836f1a 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/LongTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/LongTestSource.java @@ -14,12 +14,12 @@ import 
io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.QueryConstants; @@ -39,12 +39,13 @@ */ public class LongTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForLong, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private long lastAdditionTime; protected final Long2LongOpenHashMap data = new Long2LongOpenHashMap(); protected Long2LongOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, LongTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, LongTestSource::flushPrevious); // region empty constructor public LongTestSource() { @@ -55,6 +56,7 @@ public LongTestSource() { // region chunk constructor public LongTestSource(RowSet rowSet, Chunk data) { super(long.class); + lastAdditionTime = updateGraph.clock().currentStep(); add(rowSet, data); setDefaultReturnValue(this.data); this.prevData = this.data; @@ -113,7 +115,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ObjectTestSource.java 
b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ObjectTestSource.java index c6405969130..1d166a9031d 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ObjectTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ObjectTestSource.java @@ -14,12 +14,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.type.TypeUtils; @@ -38,12 +38,13 @@ */ public class ObjectTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForObject, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private long lastAdditionTime; protected final Long2ObjectOpenHashMap data = new Long2ObjectOpenHashMap(); protected Long2ObjectOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, ObjectTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, ObjectTestSource::flushPrevious); // region empty constructor public ObjectTestSource(Class type) { @@ -98,7 +99,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git 
a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ShortTestSource.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ShortTestSource.java index 60617ba3628..a4bb8cfe0e0 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ShortTestSource.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/sources/ShortTestSource.java @@ -14,12 +14,12 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.ObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.table.impl.AbstractColumnSource; import io.deephaven.engine.table.impl.MutableColumnSourceGetDefaults; -import io.deephaven.engine.updategraph.LogicalClock; import io.deephaven.engine.updategraph.TerminalNotification; import io.deephaven.engine.updategraph.UpdateCommitter; import io.deephaven.util.QueryConstants; @@ -39,12 +39,13 @@ */ public class ShortTestSource extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForShort, TestColumnSource { - private long lastAdditionTime = LogicalClock.DEFAULT.currentStep(); + + private long lastAdditionTime; protected final Long2ShortOpenHashMap data = new Long2ShortOpenHashMap(); protected Long2ShortOpenHashMap prevData; private final UpdateCommitter prevFlusher = - new UpdateCommitter<>(this, ShortTestSource::flushPrevious); + new UpdateCommitter<>(this, updateGraph, ShortTestSource::flushPrevious); // region empty constructor public ShortTestSource() { @@ -55,6 +56,7 @@ public ShortTestSource() { // region chunk constructor public ShortTestSource(RowSet rowSet, Chunk data) { super(short.class); + lastAdditionTime = updateGraph.clock().currentStep(); add(rowSet, data); setDefaultReturnValue(this.data); this.prevData = this.data; @@ -113,7 
+115,7 @@ public void accept(final long v) { // endregion chunk add private void maybeInitializePrevForStep() { - long currentStep = LogicalClock.DEFAULT.currentStep(); + long currentStep = updateGraph.clock().currentStep(); if (currentStep == lastAdditionTime) { return; } diff --git a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/testcase/RefreshingTableTestCase.java b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/testcase/RefreshingTableTestCase.java index 31d18625a1e..8fce59cc3c0 100644 --- a/engine/test-utils/src/main/java/io/deephaven/engine/testutil/testcase/RefreshingTableTestCase.java +++ b/engine/test-utils/src/main/java/io/deephaven/engine/testutil/testcase/RefreshingTableTestCase.java @@ -6,19 +6,16 @@ import io.deephaven.base.testing.BaseArrayTestCase; import io.deephaven.chunk.util.pools.ChunkPoolReleaseTracking; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.QueryCompiler; -import io.deephaven.engine.context.TestExecutionContextAccess; +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.UpdateErrorReporter; import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; import io.deephaven.engine.table.impl.util.AsyncClientErrorNotifier; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.testutil.GenerateTableUpdates; -import io.deephaven.engine.testutil.TstUtils; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.util.ExceptionDetails; import io.deephaven.util.SafeCloseable; @@ -44,7 +41,7 @@ abstract public 
class RefreshingTableTestCase extends BaseArrayTestCase implemen private boolean expectError = false; private SafeCloseable livenessScopeCloseable; private boolean oldLogEnabled; - private boolean oldCheckLtm; + private boolean oldSerialSafe; private SafeCloseable executionContext; List errors; @@ -57,19 +54,20 @@ public static int scaleToDesiredTestLength(final int maxIter) { public void setUp() throws Exception { super.setUp(); - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + // initialize the unit test's execution context + executionContext = TestExecutionContext.createForUnitTests().open(); + + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); + updateGraph.resetForUnitTests(false); SystemicObjectTracker.markThreadSystemic(); oldMemoize = QueryTable.setMemoizeResults(false); oldReporter = AsyncClientErrorNotifier.setReporter(this); errors = null; livenessScopeCloseable = LivenessScopeStack.open(new LivenessScope(true), true); - // initialize the unit test's execution context - executionContext = TestExecutionContextAccess.createForUnitTests().open(); - oldLogEnabled = QueryCompiler.setLogEnabled(ENABLE_QUERY_COMPILER_LOGGING); - oldCheckLtm = UpdateGraphProcessor.DEFAULT.setCheckTableOperations(false); + oldSerialSafe = updateGraph.setSerialTableOperationsSafe(true); UpdatePerformanceTracker.getInstance().enableUnitTestMode(); ChunkPoolReleaseTracking.enableStrict(); } @@ -77,7 +75,8 @@ public void setUp() throws Exception { @Override public void tearDown() throws Exception { ChunkPoolReleaseTracking.checkAndDisable(); - UpdateGraphProcessor.DEFAULT.setCheckTableOperations(oldCheckLtm); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.setSerialTableOperationsSafe(oldSerialSafe); QueryCompiler.setLogEnabled(oldLogEnabled); // reset the execution context @@ 
-86,7 +85,7 @@ public void tearDown() throws Exception { livenessScopeCloseable.close(); AsyncClientErrorNotifier.setReporter(oldReporter); QueryTable.setMemoizeResults(oldMemoize); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + updateGraph.resetForUnitTests(true); super.tearDown(); } @@ -154,7 +153,8 @@ public static void simulateShiftAwareStep(final String ctxt, int targetUpdateSiz protected static void simulateShiftAwareStep(final GenerateTableUpdates.SimulationProfile simulationProfile, final String ctxt, int targetUpdateSize, Random random, QueryTable table, ColumnInfo[] columnInfo, EvalNuggetInterface[] en) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates .generateShiftAwareTableUpdates(simulationProfile, targetUpdateSize, random, table, columnInfo)); TstUtils.validate(ctxt, en); // The EvalNugget test cases end up generating very big listener DAGs, for at each step we create a brand new diff --git a/engine/updategraph/build.gradle b/engine/updategraph/build.gradle index 752c7361c7e..1194f5ed0a9 100644 --- a/engine/updategraph/build.gradle +++ b/engine/updategraph/build.gradle @@ -9,7 +9,6 @@ dependencies { api project(':qst') implementation project(':engine-chunk') - implementation project(':engine-context') implementation project(':hotspot') implementation project(':log-factory') implementation project(':Configuration') @@ -20,7 +19,10 @@ dependencies { compileOnly 'com.google.code.findbugs:jsr305:3.0.2' compileOnly depAnnotations + testImplementation project(':engine-context') + Classpaths.inheritJUnitClassic(project, 'testImplementation') + testImplementation project(':engine-test-utils') testRuntimeOnly project(':log-to-slf4j'), project(path: ':configs'), diff --git 
a/engine/updategraph/src/main/java/io/deephaven/engine/exceptions/UpdateGraphConflictException.java b/engine/updategraph/src/main/java/io/deephaven/engine/exceptions/UpdateGraphConflictException.java new file mode 100644 index 00000000000..7fb63ed0693 --- /dev/null +++ b/engine/updategraph/src/main/java/io/deephaven/engine/exceptions/UpdateGraphConflictException.java @@ -0,0 +1,10 @@ +package io.deephaven.engine.exceptions; + +import io.deephaven.UncheckedDeephavenException; + +public class UpdateGraphConflictException extends UncheckedDeephavenException { + + public UpdateGraphConflictException(final String reason) { + super(reason); + } +} diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/AbstractNotification.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/AbstractNotification.java index 03a19d48ef5..fe4b652eca3 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/AbstractNotification.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/AbstractNotification.java @@ -23,7 +23,7 @@ protected AbstractNotification(final boolean isTerminal) { } @Override - public boolean mustExecuteWithUgpLock() { + public boolean mustExecuteWithUpdateGraphLock() { return false; } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/LogicalClock.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/LogicalClock.java index ff89b7d532f..1db677688fe 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/LogicalClock.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/LogicalClock.java @@ -3,64 +3,41 @@ */ package io.deephaven.engine.updategraph; -import io.deephaven.base.verify.Assert; -import io.deephaven.internal.log.LoggerFactory; -import io.deephaven.io.logger.Logger; -import io.deephaven.util.annotations.TestUseOnly; - -import java.util.concurrent.atomic.AtomicLong; +import 
io.deephaven.util.annotations.FinalDefault; /** - *

- * A logical update clock that has two states, Updating and Idle. - *

- * - *

- * Each time {@link #startUpdateCycle()} is called, the clock transitions to the Updating state and the current - * {@link #currentValue() value} is incremented by one. - *

- * - *

- * When {@link #completeUpdateCycle()} is called, the clock transitions back to Idle. - *

+ * A logical update clock interface that has two states, Updating and Idle. */ -public enum LogicalClock { - - DEFAULT; - - private static final Logger log = LoggerFactory.getLogger(LogicalClock.class); +public interface LogicalClock { /** * The state component of a logical timestamp. */ - public enum State { + enum State { /** - * Clock state for logical timestamps when the associated {@link UpdateGraphProcessor} is propagating updates. + * Clock state for logical timestamps when the associated {@link UpdateGraph} is propagating updates. */ Updating, /** - * Clock state for logical timestamps when the associated {@link UpdateGraphProcessor} is not - * propagating updates. + * Clock state for logical timestamps when the associated {@link UpdateGraph} is not propagating + * updates. */ Idle } - private static final long STEP_SHIFT = 1; - private static final long STATE_MASK = 1L; - - // {2, Idle}, just in case any code has 0 or 1 as an initializer. - private final AtomicLong currentValue = new AtomicLong(5L); + long STEP_SHIFT = 1; + long STATE_MASK = 1L; /** * Get the clock step for the input clock value. The step increments one time for each complete - * {@link #startUpdateCycle() start} - {@link #completeUpdateCycle() end} cycle. + * {@link LogicalClockImpl#startUpdateCycle() start} - {@link LogicalClockImpl#completeUpdateCycle() end} cycle. * * @param value The clock value to get the step for * @return The clock step associated with value */ - public static long getStep(final long value) { + static long getStep(final long value) { return value >>> STEP_SHIFT; } @@ -70,23 +47,22 @@ public static long getStep(final long value) { * @param value The clock value * @return The clock state associated with the input value */ - public static State getState(final long value) { + static State getState(final long value) { return ((value & STATE_MASK) == 0) ? State.Updating : State.Idle; } /** * Get the current value of the clock. 
*/ - public final long currentValue() { - return currentValue.get(); - } + long currentValue(); /** * Get the current Step of the clock. * * @see #getStep(long) */ - public final long currentStep() { + @FinalDefault + default long currentStep() { return getStep(currentValue()); } @@ -95,69 +71,8 @@ public final long currentStep() { * * @see #getState(long) */ - public final State currentState() { + @FinalDefault + default State currentState() { return getState(currentValue()); } - - /** - * Increment the current value and set the clock state to {@link State#Updating updating}. - * - * @implNote The clock must have been {@link State#Idle idle} before this method is called. - */ - public final long startUpdateCycle() { - final long beforeValue = currentValue.get(); - Assert.eq(getState(beforeValue), "getState(beforeValue)", State.Idle); - final long afterValue = currentValue.incrementAndGet(); - Assert.eq(afterValue, "currentValue.incrementAndGet()", beforeValue + 1, "beforeValue + 1"); - return afterValue; - } - - /** - * Increment the current step and set the clock state to {@link State#Idle idle}. - * - * @implNote The clock must have been {@link State#Updating updating} before this method is called. - */ - public final void completeUpdateCycle() { - final long value = currentValue.get(); - Assert.eq(getState(value), "getState(value)", State.Updating); - Assert.eq(currentValue.incrementAndGet(), "currentValue.incrementAndGet()", value + 1, "value + 1"); - } - - /** - * After we complete a table run, we must ensure that the logical clock is idle. - * - *

- * The only valid possibilities are (1) we have completed the cycle, in which case we return; or (2) we have - * terminated the cycle early and have the same value as at the start of our updating cycle, in which case we - * complete the cycle. - *

- * - *

- * If our clock is any other value; then it was changed out from under us and we throw an exception. - *

- * - * @param updatingCycleValue the clock value at the end of {@link #startUpdateCycle} - */ - public final void ensureUpdateCycleCompleted(final long updatingCycleValue) { - final long value = currentValue.get(); - if (value == updatingCycleValue + 1) { - return; - } - if (value == updatingCycleValue) { - log.warn() - .append("LogicalClock cycle was not completed in normal operation, value=").append(value).endl(); - completeUpdateCycle(); - return; - } - throw new IllegalStateException("Inconsistent LogicalClock value at end of cycle, expected " - + (updatingCycleValue + 1) + ", encountered " + value); - } - - /** - * Reset the clock to its initial state, in order to ensure that unit tests proceed cleanly. - */ - @TestUseOnly - public final void resetForUnitTests() { - currentValue.set(5L); - } } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/LogicalClockImpl.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/LogicalClockImpl.java new file mode 100644 index 00000000000..74c50d1892f --- /dev/null +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/LogicalClockImpl.java @@ -0,0 +1,105 @@ +/** + * Copyright (c) 2016-2023 Deephaven Data Labs and Patent Pending + */ +package io.deephaven.engine.updategraph; + +import io.deephaven.base.verify.Assert; +import io.deephaven.internal.log.LoggerFactory; +import io.deephaven.io.logger.Logger; +import io.deephaven.util.annotations.TestUseOnly; + +import java.util.concurrent.atomic.AtomicLong; + +/** + *

+ * A logical update clock that has two states, Updating and Idle. + *

+ * + *

+ * Each time {@link #startUpdateCycle()} is called, the clock transitions to the Updating state and the current + * {@link #currentValue() value} is incremented by one. + *

+ * + *

+ * When {@link #completeUpdateCycle()} is called, the clock transitions back to Idle. + *

+ */ +public class LogicalClockImpl implements LogicalClock { + + private static final Logger log = LoggerFactory.getLogger(LogicalClockImpl.class); + + // {2, Idle}, just in case any code has 0 or 1 as an initializer. + private final AtomicLong currentValue = new AtomicLong(5L); + + /** + * Get the current value of the clock. + */ + @Override + public final long currentValue() { + return currentValue.get(); + } + + + /** + * Increment the current value and set the clock state to {@link State#Updating updating}. + * + * @implNote The clock must have been {@link State#Idle idle} before this method is called. + */ + public final long startUpdateCycle() { + final long beforeValue = currentValue.get(); + Assert.eq(LogicalClock.getState(beforeValue), "getState(beforeValue)", State.Idle); + final long afterValue = currentValue.incrementAndGet(); + Assert.eq(afterValue, "currentValue.incrementAndGet()", beforeValue + 1, "beforeValue + 1"); + return afterValue; + } + + /** + * Increment the current step and set the clock state to {@link State#Idle idle}. + * + * @implNote The clock must have been {@link State#Updating updating} before this method is called. + */ + public final void completeUpdateCycle() { + final long value = currentValue.get(); + Assert.eq(LogicalClock.getState(value), "getState(value)", State.Updating); + Assert.eq(currentValue.incrementAndGet(), "currentValue.incrementAndGet()", value + 1, "value + 1"); + } + + /** + * After we complete a table run, we must ensure that the logical clock is idle. + * + *

+ * The only valid possibilities are (1) we have completed the cycle, in which case we return; or (2) we have + * terminated the cycle early and have the same value as at the start of our updating cycle, in which case we + * complete the cycle. + *

+ * + *

+ * If our clock is any other value; then it was changed out from under us and we throw an exception. + *

+ * + * @param updatingCycleValue the clock value at the end of {@link #startUpdateCycle} + */ + public final void ensureUpdateCycleCompleted(final long updatingCycleValue) { + final long value = currentValue.get(); + if (value == updatingCycleValue + 1) { + return; + } + if (value == updatingCycleValue) { + log.warn() + .append("LogicalClockImpl cycle was not completed in normal operation, value=").append(value) + .endl(); + completeUpdateCycle(); + return; + } + throw new IllegalStateException("Inconsistent LogicalClockImpl value at end of cycle, expected " + + (updatingCycleValue + 1) + ", encountered " + value); + } + + /** + * Reset the clock to its initial state, in order to ensure that unit tests proceed cleanly. + */ + @TestUseOnly + public final void resetForUnitTests() { + currentValue.set(5L); + } +} diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationAdapter.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationAdapter.java index 5175c7579fb..912d765abb2 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationAdapter.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationAdapter.java @@ -13,7 +13,7 @@ public class NotificationAdapter extends AbstractNotification { private final NotificationQueue.Notification wrapped; - NotificationAdapter(@NotNull final NotificationQueue.Notification wrapped) { + public NotificationAdapter(@NotNull final NotificationQueue.Notification wrapped) { super(wrapped.isTerminal()); this.wrapped = wrapped; } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationQueue.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationQueue.java index 8bace81dd88..b10f5b5c170 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationQueue.java +++ 
b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/NotificationQueue.java @@ -4,8 +4,12 @@ package io.deephaven.engine.updategraph; import io.deephaven.base.log.LogOutputAppendable; +import io.deephaven.engine.exceptions.UpdateGraphConflictException; import io.deephaven.util.datastructures.linked.IntrusiveDoublyLinkedNode; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Collection; /** * Interface for notification of update graph node changes. @@ -26,14 +30,14 @@ interface Notification extends Runnable, LogOutputAppendable, IntrusiveDoublyLin boolean isTerminal(); /** - * If a terminal notification must be executed on the main UGP thread, it must override this method, so that the - * notification is not executed on the run pool. - * + * If a terminal notification must be executed serially (typically under an UpdateGraph's exclusive lock), it + * must override this method so that the notification is not executed concurrently with other notifications. + *

* It is an error to return true if this notification is not terminal * - * @return true if this notification must be executed directly under the protection of the UGP lock + * @return true if this notification must be executed serially */ - boolean mustExecuteWithUgpLock(); + boolean mustExecuteWithUpdateGraphLock(); /** * Can this notification be executed? That is, are all of it's dependencies satisfied. @@ -56,10 +60,62 @@ interface Dependency extends LogOutputAppendable { * * @param step The step for which we are testing satisfaction * @return Whether the dependency is satisfied on {@code step} (and will not fire subsequent notifications) - * @implNote For all practical purposes, all implementations should consider whether the - * {@link UpdateGraphProcessor} itself is satisfied if they have no other dependencies. + * @implNote For all practical purposes, all implementations should consider whether the {@link UpdateGraph} + * itself is satisfied if they have no other dependencies. */ boolean satisfied(long step); + + /** + * @return the update graph that this dependency is a part of + */ + UpdateGraph getUpdateGraph(); + + default UpdateGraph getUpdateGraph(Dependency... dependencies) { + return NotificationQueue.Dependency.getUpdateGraph(this, dependencies); + } + + /** + * Examine all {@code dependencies} excluding non-refreshing {@link DynamicNode dynamic nodes}, and verify that + * they are using the same {@link UpdateGraph}. + *

+ * If a singular update graph was found in this process, return it. + *

+ * Otherwise, if all dependencies are non-refreshing {@link DynamicNode dynamic nodes}, return null. + * + * @param first at least one dependency is helpful + * @param dependencies the dependencies to examine + * @return the singular {@link UpdateGraph} used by all {@code dependencies}, or null if all + * {@code dependencies} are non-refreshing {@link DynamicNode dynamic nodes} + * @throws UpdateGraphConflictException if multiple update graphs were found in the dependencies + */ + static UpdateGraph getUpdateGraph(@Nullable Dependency first, Dependency... dependencies) { + UpdateGraph graph = null; + UpdateGraph firstNonNullGraph = null; + + if (first != null) { + firstNonNullGraph = first.getUpdateGraph(); + if (!DynamicNode.isDynamicAndNotRefreshing(first)) { + graph = first.getUpdateGraph(); + } + } + + for (final Dependency other : dependencies) { + if (other != null && firstNonNullGraph == null) { + firstNonNullGraph = other.getUpdateGraph(); + } + if (other == null || DynamicNode.isDynamicAndNotRefreshing(other)) { + continue; + } + if (graph == null) { + graph = other.getUpdateGraph(); + } else if (graph != other.getUpdateGraph()) { + throw new UpdateGraphConflictException("Multiple update graphs found in dependencies: " + graph + + " and " + other.getUpdateGraph()); + } + } + + return graph == null ? firstNonNullGraph : graph; + } } /** @@ -71,6 +127,15 @@ interface Dependency extends LogOutputAppendable { */ void addNotification(@NotNull Notification notification); + /** + * Enqueue a collection of notifications to be flushed. + * + * @param notifications The notification to enqueue + * + * @see #addNotification(Notification) + */ + void addNotifications(@NotNull final Collection notifications); + /** * Add a notification for this NotificationQueue to deliver (by invoking its run() method), iff the delivery step is * the current step and the update cycle for that step is still in process. 
This is only supported for non-terminal diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/OneShotUpdateCombiner.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/OneShotUpdateCombiner.java index 86b9348f31b..bbc57495310 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/OneShotUpdateCombiner.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/OneShotUpdateCombiner.java @@ -15,6 +15,11 @@ public class OneShotUpdateCombiner implements Runnable, UpdateSourceRegistrar { private final Queue sources = new ArrayDeque<>(); + private final UpdateGraph updateGraph; + + public OneShotUpdateCombiner(final UpdateGraph updateGraph) { + this.updateGraph = updateGraph; + } @Override public void run() { @@ -45,10 +50,10 @@ public void removeSource(@NotNull final Runnable updateSource) { } /** - * Passes through to the {@link UpdateGraphProcessor#DEFAULT update graph processor}. + * Passes through to the {@link UpdateGraph update graph} associated with the current update context. 
*/ @Override public void requestRefresh() { - UpdateGraphProcessor.DEFAULT.requestRefresh(); + updateGraph.requestRefresh(); } } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitter.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitter.java index b59f2ed1ee4..d03541938ee 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitter.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitter.java @@ -16,10 +16,12 @@ public class UpdateCommitter extends TerminalNotification { private final WeakReference targetReference; private final Consumer committer; + private final UpdateGraph updateGraph; private boolean active; - public UpdateCommitter(T target, Consumer committer) { + public UpdateCommitter(T target, UpdateGraph updateGraph, Consumer committer) { this.targetReference = new WeakReference<>(target); + this.updateGraph = updateGraph; this.committer = committer; this.active = false; } @@ -38,6 +40,6 @@ public void maybeActivate() { return; } active = true; - UpdateGraphProcessor.DEFAULT.addNotification(this); + updateGraph.addNotification(this); } } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitterEx.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitterEx.java index 93140571060..cc05a6d83e2 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitterEx.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateCommitterEx.java @@ -20,13 +20,15 @@ public class UpdateCommitterEx extends TerminalNotification { private final WeakReference targetReference; + private final UpdateGraph updateGraph; private final BiConsumer committer; private WeakReference secondaryReference; private boolean active; - public UpdateCommitterEx(T target, BiConsumer committer) { + public UpdateCommitterEx(T target, 
UpdateGraph updateGraph, BiConsumer committer) { this.targetReference = new WeakReference<>(target); + this.updateGraph = updateGraph; this.committer = committer; } @@ -48,6 +50,6 @@ public void maybeActivate(@NotNull final U secondary) { if (secondaryReference == null || secondaryReference.get() != secondary) { secondaryReference = new WeakReference<>(secondary); } - UpdateGraphProcessor.DEFAULT.addNotification(this); + updateGraph.addNotification(this); } } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraph.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraph.java new file mode 100644 index 00000000000..0f016726a7a --- /dev/null +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraph.java @@ -0,0 +1,233 @@ +package io.deephaven.engine.updategraph; + +import io.deephaven.base.log.LogOutput; +import io.deephaven.base.verify.Assert; +import io.deephaven.io.log.LogEntry; +import io.deephaven.util.function.ThrowingSupplier; +import io.deephaven.util.locks.AwareFunctionalLock; +import org.jetbrains.annotations.NotNull; + +import java.util.concurrent.locks.Condition; +import java.util.function.Supplier; + +public interface UpdateGraph extends UpdateSourceRegistrar, NotificationQueue, NotificationQueue.Dependency { + + // region general accessors + + /** + * @return The shared {@link AwareFunctionalLock} to use with this update graph + */ + AwareFunctionalLock sharedLock(); + + /** + * @return The exclusive {@link AwareFunctionalLock} to use with this update graph + */ + AwareFunctionalLock exclusiveLock(); + + /** + * @return The {@link LogicalClock} to use with this update graph + */ + LogicalClock clock(); + + /** + * Retrieve the number of independent update propagation tasks this UpdateGraph can process concurrently. + *

+ * For example, an implementation using a fixed-size thread pool of update task workers should return the size of + * the thread pool. + *

+ * This is exposed in order to allow users to determine the ideal way to partition their queries for maximum + * parallelism without undue overhead. + * + * @return number of independent update propagation tasks this UpdateGraph can process concurrently + */ + int parallelismFactor(); + + /** + * Cast {@code this} to a more specific UpdateGraph type, in order to access implementation-specific methods. + * + * @param The UpdateGraph type to cast to + * @return {@code this} + */ + default UG_TYPE cast() { + // noinspection unchecked + return (UG_TYPE) this; + } + + // endregion general accessors + + // region notification support tools + + /** + * @return A LogEntry that may be prefixed with UpdateGraph information + */ + LogEntry logDependencies(); + + // endregion notification support tools + + // region thread control + + /** + * Test if the current thread is involved in processing updates for this UpdateGraph. If so, non-terminal user + * notifications on the current thread must not attempt to lock this UpdateGraph. + * + * @return Whether the current thread is involved in processing updates for this UpdateGraph + */ + boolean currentThreadProcessesUpdates(); + + /** + * Test if engine code executing on the current thread should assume safety for serial table operations. Operations + * annotated as concurrent are always safe. + * + * @return Whether code on this thread should assume serial table operation safety + * @see #checkInitiateSerialTableOperation() + * @see #setSerialTableOperationsSafe(boolean) + * @see #allowSerialTableOperations(Supplier) + * @see #allowSerialTableOperations(ThrowingSupplier) + */ + boolean serialTableOperationsSafe(); + + /** + * User or engine code that makes its own determination about the safety of initiating serial table operations on + * the current thread may use this method to override default behavior. The previous value should be restored + * immediately after use, typically with the following pattern: + * + *

+     * boolean oldValue = assumeSerialTableOperations(true);
+     * try {
+     *     // ... safe table operations here
+     * } finally {
+     *     assumeSerialTableOperations(oldValue);
+     * }
+     * 
+ * + * @param newValue the new value + * @return the old value + * @see #serialTableOperationsSafe() + * @see #allowSerialTableOperations(Supplier) + * @see #allowSerialTableOperations(ThrowingSupplier) + */ + boolean setSerialTableOperationsSafe(boolean newValue); + + /** + * User or engine code that is certain a particular table operation is safe to execute with respect to this + * UpdateGraph may use this method to follow the prescribed pattern for declaring safety and reinstating the + * priority safety parameters. + * + * @param operation The safe operation to perform + * @param The return type of the operation + * @return The result of {@code operation} + * @see #serialTableOperationsSafe() + */ + default RETURN_TYPE allowSerialTableOperations(@NotNull final Supplier operation) { + final boolean oldValue = setSerialTableOperationsSafe(true); + try { + return operation.get(); + } finally { + setSerialTableOperationsSafe(oldValue); + } + } + + /** + * User or engine code that is certain a particular table operation is safe to execute with respect to this + * UpdateGraph may use this method to follow the prescribed pattern for declaring safety and reinstating the + * priority safety parameters. 
+ * + * @param operation The safe operation to perform + * @param The return type of the operation + * @param The exception type the operation might throw + * @return The result of {@code operation} + * @throws EXCEPTION_TYPE if {@code operation} throws + * @see #serialTableOperationsSafe() + */ + default RETURN_TYPE allowSerialTableOperations( + @NotNull final ThrowingSupplier operation) throws EXCEPTION_TYPE { + final boolean oldValue = setSerialTableOperationsSafe(true); + try { + return operation.get(); + } finally { + setSerialTableOperationsSafe(oldValue); + } + } + + /** + * If we initiate a serial (not annotated as concurrent) table operation that should update using this UpdateGraph + * without holding the appropriate lock, then we are likely committing a grievous error, but one that will only + * occasionally result in us getting the wrong answer or if we are lucky an assertion. This method is called from + * various table operations that should not be established without locking their UpdateGraph. + *

+ * Threads that process this UpdateGraph's updates are assumed to be safe; if dependencies are tracked correctly, + * these threads will only initiate table operations when they can proceed. + *

+ * User or engine code may bypass this check using {@link #setSerialTableOperationsSafe(boolean)} or the related + * wrapper methods. + * + * @see #serialTableOperationsSafe() () + * @see #setSerialTableOperationsSafe(boolean) + * @see #allowSerialTableOperations(Supplier) + * @see #allowSerialTableOperations(ThrowingSupplier) + */ + default void checkInitiateSerialTableOperation() { + if (serialTableOperationsSafe() + || exclusiveLock().isHeldByCurrentThread() + || sharedLock().isHeldByCurrentThread() + || currentThreadProcessesUpdates()) { + return; + } + throw new IllegalStateException(String.format( + "May not initiate serial table operations: exclusiveLockHeld=%s, sharedLockHeld=%s, currentThreadProcessesUpdates=%s", + exclusiveLock().isHeldByCurrentThread(), + sharedLock().isHeldByCurrentThread(), + currentThreadProcessesUpdates())); + } + + // endregion thread control + + // region refresh control + + /** + * @return Whether this UpdateGraph has a mechanism that supports refreshing + */ + boolean supportsRefreshing(); + + /** + * Request that this UpdateGraph process any pending updates as soon as practicable. Updates "hurried" in this way + * are otherwise processed as normal. + */ + void requestRefresh(); + + /** + * Request that a {@link Condition} derived from this UpdateGraph's {@link #exclusiveLock()} be + * {@link Condition#signalAll() signalled} in a safe manner. This may take place asynchronously. 
+ * + * @param exclusiveLockCondition The condition to signal + */ + default void requestSignal(Condition exclusiveLockCondition) { + if (exclusiveLock().isHeldByCurrentThread()) { + exclusiveLockCondition.signalAll(); + } else { + // terminal notifications always run on the UGP thread + final Notification terminalNotification = new TerminalNotification() { + @Override + public void run() { + Assert.assertion(exclusiveLock().isHeldByCurrentThread(), + "exclusiveLock().isHeldByCurrentThread()"); + exclusiveLockCondition.signalAll(); + } + + @Override + public boolean mustExecuteWithUpdateGraphLock() { + return true; + } + + @Override + public LogOutput append(LogOutput output) { + return output.append("SignalNotification(") + .append(System.identityHashCode(exclusiveLockCondition)).append(")"); + } + }; + addNotification(terminalNotification); + } + } + + // endregion refresh control +} diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphLock.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphLock.java index 17843140cee..c0b3e59d6ee 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphLock.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateGraphLock.java @@ -27,14 +27,14 @@ import java.util.stream.Stream; /** - * Lock class to support {@link UpdateGraphProcessor}. + * Lock class to support {@link UpdateGraph}. */ public abstract class UpdateGraphLock { private static final Logger log = LoggerFactory.getLogger(UpdateGraphLock.class); private static final boolean STACK_DUMP_LOCKS = - Configuration.getInstance().getBooleanWithDefault("UpdateGraphProcessor.stackDumpLocks", false); + Configuration.getInstance().getBooleanWithDefault("UpdateGraphLock.stackDumpLocks", false); /** * Instrumentation interface for recording lock events. 
@@ -66,34 +66,34 @@ public static void installInstrumentation(@Nullable final Instrumentation instru private static Instrumentation instrumentation = new Instrumentation() {}; /** - * Construct a lock for a new {@link UpdateGraphProcessor} instance. + * Construct a lock for a new {@link UpdateGraph} instance. * - * @param logicalClock The {@link LogicalClock} instance to use + * @param updateGraph The {@link UpdateGraph} instance to use * @param allowUnitTestMode Whether this lock instance is to be used for unit tests only */ - public static UpdateGraphLock create(@NotNull final LogicalClock logicalClock, final boolean allowUnitTestMode) { + public static UpdateGraphLock create(@NotNull final UpdateGraph updateGraph, final boolean allowUnitTestMode) { return allowUnitTestMode - ? new ResettableUpdateGraphLock(logicalClock) - : new FinalUpdateGraphLock(logicalClock); + ? new ResettableUpdateGraphLock(updateGraph) + : new FinalUpdateGraphLock(updateGraph); } /** - * The {@link LogicalClock} used for instrumentation and assertions. + * The {@link UpdateGraph} used for instrumentation and assertions. */ - protected final LogicalClock logicalClock; + protected final UpdateGraph updateGraph; /** - * Construct a lock for a new {@link UpdateGraphProcessor} instance. + * Construct a lock for a new {@link UpdateGraph} instance. * - * @param logicalClock The {@link LogicalClock} instance to use + * @param updateGraph The {@link UpdateGraph} instance to use */ - UpdateGraphLock(@NotNull final LogicalClock logicalClock) { - this.logicalClock = logicalClock; + UpdateGraphLock(@NotNull final UpdateGraph updateGraph) { + this.updateGraph = updateGraph; } /** * Get the shared lock (similar to {@link java.util.concurrent.locks.ReadWriteLock#readLock()}, but with - * UGP-specific instrumentation). See {@link UpdateGraphProcessor#sharedLock()} for user-facing documentation. + * UGP-specific instrumentation). See {@link UpdateGraph#sharedLock()} for user-facing documentation. 
* * @return The shared lock */ @@ -101,7 +101,7 @@ public static UpdateGraphLock create(@NotNull final LogicalClock logicalClock, f /** * Get the exclusive lock (similar to {@link java.util.concurrent.locks.ReadWriteLock#writeLock()} ()}, but with - * UGP-specific instrumentation). See {@link UpdateGraphProcessor#exclusiveLock()} for user-facing documentation. + * UGP-specific instrumentation). See {@link UpdateGraph#exclusiveLock()} for user-facing documentation. * * @return The exclusive lock */ @@ -127,11 +127,11 @@ private static final class FinalUpdateGraphLock extends UpdateGraphLock { */ private final AwareFunctionalLock exclusiveLock; - private FinalUpdateGraphLock(@NotNull final LogicalClock logicalClock) { - super(logicalClock); + private FinalUpdateGraphLock(@NotNull final UpdateGraph updateGraph) { + super(updateGraph); final ReadWriteLockAccessor lockAccessor = new ReentrantReadWriteLockAccessor(); - this.sharedLock = new SharedLock(logicalClock, lockAccessor); - this.exclusiveLock = new ExclusiveLock(logicalClock, lockAccessor); + this.sharedLock = new SharedLock(updateGraph, lockAccessor); + this.exclusiveLock = new ExclusiveLock(updateGraph, lockAccessor); } @Override @@ -172,15 +172,15 @@ private static final class ResettableUpdateGraphLock extends UpdateGraphLock { */ private volatile AwareFunctionalLock exclusiveLock; - private ResettableUpdateGraphLock(@NotNull final LogicalClock logicalClock) { - super(logicalClock); + private ResettableUpdateGraphLock(@NotNull final UpdateGraph updateGraph) { + super(updateGraph); initialize(); } private synchronized void initialize() { lockAccessor = new RecordedReadWriteLockAccessor(); - sharedLock = new SharedLock(logicalClock, lockAccessor); - exclusiveLock = new ExclusiveLock(logicalClock, lockAccessor); + sharedLock = new SharedLock(updateGraph, lockAccessor); + exclusiveLock = new ExclusiveLock(updateGraph, lockAccessor); } @Override @@ -269,7 +269,7 @@ private static class SharedLock implements 
AwareFunctionalLock { /** * Logical clock used for correctness checks. */ - private final LogicalClock logicalClock; + private final UpdateGraph updateGraph; /** * Accessor for the underlying lock implementation. @@ -282,9 +282,9 @@ private static class SharedLock implements AwareFunctionalLock { private final Lock readLock; private SharedLock( - @NotNull final LogicalClock logicalClock, + @NotNull final UpdateGraph updateGraph, @NotNull final ReadWriteLockAccessor lockAccessor) { - this.logicalClock = logicalClock; + this.updateGraph = updateGraph; this.lockAccessor = lockAccessor; this.readLock = lockAccessor.readLock(); } @@ -296,10 +296,10 @@ public final boolean isHeldByCurrentThread() { @Override public final void lock() { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); final MutableBoolean lockSucceeded = new MutableBoolean(false); try { - instrumentation.recordAction("Acquire UpdateGraphProcessor readLock", () -> { + instrumentation.recordAction("Acquire UpdateGraph readLock", () -> { readLock.lock(); lockSucceeded.setValue(true); }); @@ -316,10 +316,10 @@ public final void lock() { @Override public final void lockInterruptibly() throws InterruptedException { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); final MutableBoolean lockSucceeded = new MutableBoolean(false); try { - instrumentation.recordActionInterruptibly("Acquire UpdateGraphProcessor readLock interruptibly", + instrumentation.recordActionInterruptibly("Acquire UpdateGraph readLock interruptibly", () -> { readLock.lockInterruptibly(); lockSucceeded.setValue(true); @@ -337,7 +337,7 @@ public final void lockInterruptibly() throws InterruptedException { @Override public final boolean tryLock() { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); if (readLock.tryLock()) { maybeLogStackTrace("locked (shared)"); return true; @@ 
-347,7 +347,7 @@ public final boolean tryLock() { @Override public final boolean tryLock(final long time, @NotNull final TimeUnit unit) throws InterruptedException { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); if (readLock.tryLock(time, unit)) { maybeLogStackTrace("locked (shared)"); return true; @@ -377,7 +377,7 @@ private static class ExclusiveLock implements AwareFunctionalLock { /** * Logical clock used for correctness checks. */ - private final LogicalClock logicalClock; + private final UpdateGraph updateGraph; /** * Accessor for the underlying lock implementation. @@ -390,9 +390,9 @@ private static class ExclusiveLock implements AwareFunctionalLock { private final Lock writeLock; private ExclusiveLock( - @NotNull final LogicalClock logicalClock, + @NotNull final UpdateGraph updateGraph, @NotNull final ReadWriteLockAccessor lockAccessor) { - this.logicalClock = logicalClock; + this.updateGraph = updateGraph; this.lockAccessor = lockAccessor; this.writeLock = lockAccessor.writeLock(); } @@ -404,15 +404,15 @@ public final boolean isHeldByCurrentThread() { @Override public final void lock() { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); checkForUpgradeAttempt(); final MutableBoolean lockSucceeded = new MutableBoolean(false); try { - instrumentation.recordAction("Acquire UpdateGraphProcessor writeLock", () -> { + instrumentation.recordAction("Acquire UpdateGraph writeLock", () -> { writeLock.lock(); lockSucceeded.setValue(true); }); - Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); + Assert.eq(updateGraph.clock().currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); maybeLogStackTrace("locked (exclusive)"); } catch (Throwable t) { // If the recorder instrumentation causes us to throw an exception after the writeLock was @@ -426,16 +426,16 @@ public final void lock() { 
@Override public final void lockInterruptibly() throws InterruptedException { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); checkForUpgradeAttempt(); final MutableBoolean lockSucceeded = new MutableBoolean(false); try { - instrumentation.recordActionInterruptibly("Acquire UpdateGraphProcessor writeLock interruptibly", + instrumentation.recordActionInterruptibly("Acquire UpdateGraph writeLock interruptibly", () -> { writeLock.lockInterruptibly(); lockSucceeded.setValue(true); }); - Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); + Assert.eq(updateGraph.clock().currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); maybeLogStackTrace("locked (exclusive)"); } catch (Throwable t) { // If the recorder instrumentation causes us to throw an exception after the writeLock was @@ -449,7 +449,7 @@ public final void lockInterruptibly() throws InterruptedException { @Override public final boolean tryLock() { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); checkForUpgradeAttempt(); if (writeLock.tryLock()) { maybeLogStackTrace("locked (exclusive)"); @@ -460,7 +460,7 @@ public final boolean tryLock() { @Override public final boolean tryLock(final long time, @NotNull final TimeUnit unit) throws InterruptedException { - checkForIllegalLockFromRefreshThread(logicalClock); + checkForIllegalLockFromRefreshThread(updateGraph); checkForUpgradeAttempt(); if (writeLock.tryLock(time, unit)) { maybeLogStackTrace("locked (exclusive)"); @@ -471,7 +471,7 @@ public final boolean tryLock(final long time, @NotNull final TimeUnit unit) thro @Override public final void unlock() { - Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); + Assert.eq(updateGraph.clock().currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); writeLock.unlock(); 
maybeLogStackTrace("unlocked (exclusive)"); } @@ -493,9 +493,8 @@ private void checkForUpgradeAttempt() { // region Lock Safety Validation Helper - // TODO (https://github.com/deephaven/deephaven-core/pull/3506): Update this for multiple update graphs /** - * Check for inappropriate locking from a refresh thread during the updating phase. + * Check for inappropriate locking from an update thread during the updating phase. *

* Under normal conditions we expect only the primary (or singular, in single-threaded update graph processors) * refresh thread to acquire either lock, and that that thread always acquires the exclusive lock during the idle @@ -503,27 +502,31 @@ private void checkForUpgradeAttempt() { *

* Were a worker refresh thread to attempt to acquire either lock without a timeout during the updating phase, it * would block forever or until interrupted. Trying to lock with a timeout wouldn't block forever, but would - * negatively impact the responsiveness of the update graph processor. This behavior would "work" for + * negatively impact the responsiveness of update graph processing. This behavior would "work" for * misbehaving notifications under a single-threaded update graph processor with the current implementation, but * would immediately become broken upon adding additional update threads. We prefer to proactively prevent * notifications from attempting to do this, rather than leave it for users to debug. *

- * Note that the worker refresh threads (if there are any) are never active during the idle phase unless processing + * Note that the worker update threads (if there are any) are never active during the idle phase unless processing * terminal notifications that don't require the exclusive lock. Other terminal notifications are processed by the * primary refresh thread under the exclusive lock. *

* Two rules follow from this: *

    - *
  1. It is always safe for a refresh thread to acquire either lock during the idle phase, as long as other rules + *
  2. It is always safe for an update thread to acquire either lock during the idle phase, as long as other rules * are respected (no inversions, no upgrades, and no attempts to wait for the update graph to do work).
  3. - *
  4. It is never safe for a refresh thread to acquire either lock during the updating phase.
  5. + *
  6. It is never safe for an update thread to acquire either lock during the updating phase.
  7. *
+ *

+ * Note that this validation only prevents lock attempts from threads belonging to the same {@link UpdateGraph} as + * this UpdateGraphLock. It may be suitable to lock an UpdateGraph from a thread belonging to a different + * UpdateGraph if doing so does not introduce any cycles. * - * @param logicalClock The logical clock to check for {@link LogicalClock#currentState() current state} + * @param updateGraph The update graph to check for {@link UpdateGraph#clock()#currentState() current state} */ - private static void checkForIllegalLockFromRefreshThread(@NotNull final LogicalClock logicalClock) { - if (logicalClock.currentState() == LogicalClock.State.Updating - && UpdateGraphProcessor.DEFAULT.isRefreshThread()) { + private static void checkForIllegalLockFromRefreshThread(@NotNull final UpdateGraph updateGraph) { + if (updateGraph.clock().currentState() == LogicalClock.State.Updating + && updateGraph.currentThreadProcessesUpdates()) { // This exception message assumes the misbehavior is from a notification (e.g. for a user listener), rather // than an internal programming error. 
throw new UnsupportedOperationException("Non-terminal notifications must not lock the update graph"); @@ -720,7 +723,7 @@ private LockDebugException(@NotNull final String message) { private static void maybeLogStackTrace(final String type) { if (STACK_DUMP_LOCKS) { - log.info().append("Update Graph Processor ").append(new LockDebugException(type)).endl(); + log.info().append("Update Graph ").append(new LockDebugException(type)).endl(); } } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceCombiner.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceCombiner.java index 127d3f72315..3584f060595 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceCombiner.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceCombiner.java @@ -11,12 +11,18 @@ /** * Update source that combines multiple sources in order to force them to be refreshed as a unit within the - * {@link UpdateGraphProcessor#DEFAULT update graph processor}. + * {@link UpdateGraph update graph} provided at construction. */ public class UpdateSourceCombiner extends LivenessArtifact implements Runnable, UpdateSourceRegistrar { + private final UpdateGraph updateGraph; + private final WeakReferenceManager combinedTables = new WeakReferenceManager<>(true); + public UpdateSourceCombiner(final UpdateGraph updateGraph) { + this.updateGraph = updateGraph; + } + @Override public void run() { combinedTables.forEachValidReference(Runnable::run); @@ -26,11 +32,11 @@ public void run() { public void addSource(@NotNull final Runnable updateSource) { if (updateSource instanceof DynamicNode) { final DynamicNode dynamicUpdateSource = (DynamicNode) updateSource; - // Like a UpdateGraphProcessor, we need to ensure that DynamicNodes added to this combiner are set to + // Like a UpdateGraph, we need to ensure that DynamicNodes added to this combiner are set to // refreshing. 
// NB: addParentReference usually sets refreshing as a side effect, but it's clearer to do it explicitly. dynamicUpdateSource.setRefreshing(true); - // Unlike an UpdateGraphProcessor, we must also ensure that DynamicNodes added to this combiner have the + // Unlike an UpdateGraph, we must also ensure that DynamicNodes added to this combiner have the // combiner as a parent, in order to ensure the integrity of the resulting DAG. dynamicUpdateSource.addParentReference(this); } @@ -43,16 +49,16 @@ public void removeSource(@NotNull final Runnable updateSource) { } /** - * Passes through to the {@link UpdateGraphProcessor#DEFAULT update graph processor}. + * Passes through to the {@link UpdateGraph update graph} passed at construction. */ @Override public void requestRefresh() { - UpdateGraphProcessor.DEFAULT.requestRefresh(); + updateGraph.requestRefresh(); } @Override public void destroy() { super.destroy(); - UpdateGraphProcessor.DEFAULT.removeSource(this); + updateGraph.removeSource(this); } } diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceRegistrar.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceRegistrar.java index f61c78c46be..cc489ccdb69 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceRegistrar.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/UpdateSourceRegistrar.java @@ -5,6 +5,8 @@ import org.jetbrains.annotations.NotNull; +import java.util.Collection; + /** * Common interface for classes that can register and de-register update sources. */ @@ -24,6 +26,18 @@ public interface UpdateSourceRegistrar { */ void removeSource(@NotNull Runnable updateSource); + /** + * Remove a collection of sources from the list of refreshing sources. + * + * @implNote This will not set the sources as {@link DynamicNode#setRefreshing(boolean) non-refreshing}. 
+ * @param sourcesToRemove The sources to remove from the list of refreshing sources + */ + default void removeSources(final Collection sourcesToRemove) { + for (final Runnable source : sourcesToRemove) { + removeSource(source); + } + } + /** * Request that the next update cycle begin as soon as practicable. */ diff --git a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/WaitNotification.java b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/WaitNotification.java index ea9dcf86568..76e4605713b 100644 --- a/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/WaitNotification.java +++ b/engine/updategraph/src/main/java/io/deephaven/engine/updategraph/WaitNotification.java @@ -77,7 +77,8 @@ private void await() throws InterruptedException { public static boolean waitForSatisfaction(final long step, @NotNull final NotificationQueue.Dependency... dependencies) { final WaitNotification waitNotification = new WaitNotification(dependencies); - if (UpdateGraphProcessor.DEFAULT.maybeAddNotification(waitNotification, step)) { + if (NotificationQueue.Dependency.getUpdateGraph(null, dependencies).maybeAddNotification(waitNotification, + step)) { try { waitNotification.await(); } catch (InterruptedException e) { diff --git a/engine/updategraph/src/test/java/io/deephaven/engine/updategraph/TestUpdateGraphLock.java b/engine/updategraph/src/test/java/io/deephaven/engine/updategraph/TestUpdateGraphLock.java index e2eaa5a6883..2b87a49f057 100644 --- a/engine/updategraph/src/test/java/io/deephaven/engine/updategraph/TestUpdateGraphLock.java +++ b/engine/updategraph/src/test/java/io/deephaven/engine/updategraph/TestUpdateGraphLock.java @@ -4,8 +4,11 @@ package io.deephaven.engine.updategraph; import io.deephaven.UncheckedDeephavenException; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import junit.framework.TestCase; import org.apache.commons.lang3.mutable.MutableBoolean; 
+import org.junit.Rule; import org.junit.Test; import java.util.concurrent.TimeUnit; @@ -17,9 +20,13 @@ */ public class TestUpdateGraphLock { + @Rule + public final EngineCleanup framework = new EngineCleanup(); + @Test public void testUpgradeFailures() throws InterruptedException { - final UpdateGraphLock lock = UpdateGraphLock.create(LogicalClock.DEFAULT, false); + final UpdateGraphLock lock = + UpdateGraphLock.create(ExecutionContext.getContext().getUpdateGraph(), false); lock.sharedLock().doLocked(() -> { try { @@ -56,7 +63,8 @@ public void testUpgradeFailures() throws InterruptedException { @Test public void testDowngradeSuccess() throws InterruptedException { - final UpdateGraphLock lock = UpdateGraphLock.create(LogicalClock.DEFAULT, false); + final UpdateGraphLock lock = + UpdateGraphLock.create(ExecutionContext.getContext().getUpdateGraph(), false); lock.exclusiveLock().doLocked(() -> { final MutableBoolean success = new MutableBoolean(false); @@ -110,7 +118,8 @@ public void testDowngradeSuccess() throws InterruptedException { @Test public void testSharedLockHeld() { - final UpdateGraphLock lock = UpdateGraphLock.create(LogicalClock.DEFAULT, false); + final UpdateGraphLock lock = + UpdateGraphLock.create(ExecutionContext.getContext().getUpdateGraph(), false); final Consumer checkHeld = (r) -> { TestCase.assertTrue(lock.sharedLock().isHeldByCurrentThread()); lock.sharedLock().doLocked(r::run); @@ -126,7 +135,8 @@ public void testSharedLockHeld() { @Test public void testExclusiveLockHeld() { - final UpdateGraphLock lock = UpdateGraphLock.create(LogicalClock.DEFAULT, false); + final UpdateGraphLock lock = + UpdateGraphLock.create(ExecutionContext.getContext().getUpdateGraph(), false); final Consumer checkHeld = (r) -> { TestCase.assertTrue(lock.exclusiveLock().isHeldByCurrentThread()); lock.exclusiveLock().doLocked(r::run); @@ -141,7 +151,8 @@ public void testExclusiveLockHeld() { @Test public void testConditions() throws InterruptedException { - final 
UpdateGraphLock lock = UpdateGraphLock.create(LogicalClock.DEFAULT, false); + final UpdateGraphLock lock = + UpdateGraphLock.create(ExecutionContext.getContext().getUpdateGraph(), false); try { lock.sharedLock().newCondition(); TestCase.fail("Unexpectedly got shard lock condition successfully"); @@ -165,7 +176,8 @@ public void testConditions() throws InterruptedException { @Test public void testDebugImplementation() { - final UpdateGraphLock lock = UpdateGraphLock.create(LogicalClock.DEFAULT, true); + final UpdateGraphLock lock = + UpdateGraphLock.create(ExecutionContext.getContext().getUpdateGraph(), true); lock.sharedLock().lock(); lock.sharedLock().lock(); try { diff --git a/extensions/arrow/src/main/java/io/deephaven/extensions/arrow/ArrowWrapperTools.java b/extensions/arrow/src/main/java/io/deephaven/extensions/arrow/ArrowWrapperTools.java index 50f529626ae..ad9c1c8f957 100644 --- a/extensions/arrow/src/main/java/io/deephaven/extensions/arrow/ArrowWrapperTools.java +++ b/extensions/arrow/src/main/java/io/deephaven/extensions/arrow/ArrowWrapperTools.java @@ -3,8 +3,8 @@ import io.deephaven.base.ArrayUtil; import io.deephaven.base.reference.WeakCleanupReference; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ResettableContext; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.file.FileHandle; import io.deephaven.engine.util.file.TrackedFileHandleFactory; import io.deephaven.engine.util.reference.CleanupReferenceProcessorInstance; @@ -121,7 +121,8 @@ * suggested future improvements. 
*/ public class ArrowWrapperTools { - private static final int MAX_POOL_SIZE = Math.max(UpdateGraphProcessor.DEFAULT.getUpdateThreads(), + private static final int MAX_POOL_SIZE = Math.max( + ExecutionContext.getContext().getUpdateGraph().parallelismFactor(), Configuration.getInstance().getIntegerWithDefault("ArrowWrapperTools.defaultMaxPooledContext", 4)); private static final BufferAllocator rootAllocator = new RootAllocator(); diff --git a/extensions/arrow/src/test/java/io/deephaven/extensions/arrow/ArrowWrapperToolsTest.java b/extensions/arrow/src/test/java/io/deephaven/extensions/arrow/ArrowWrapperToolsTest.java index a28fd4725b7..f98dfb56b03 100644 --- a/extensions/arrow/src/test/java/io/deephaven/extensions/arrow/ArrowWrapperToolsTest.java +++ b/extensions/arrow/src/test/java/io/deephaven/extensions/arrow/ArrowWrapperToolsTest.java @@ -5,6 +5,7 @@ import io.deephaven.chunk.WritableIntChunk; import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderSequential; @@ -19,6 +20,7 @@ import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.TableTools; import io.deephaven.util.QueryConstants; +import io.deephaven.util.SafeCloseable; import io.deephaven.util.SafeCloseableArray; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; @@ -61,7 +63,6 @@ public void testReadMultiArrowFile() { String path = file.getPath(); Table table = ArrowWrapperTools.readFeather(path); Collection> columnSources = table.getColumnSources(); - List> list = new ArrayList<>(columnSources); final RowSetBuilderSequential builder = RowSetFactory.builderSequential(); builder.appendRange(0, 2); @@ -291,10 +292,11 @@ public void testConcurrentSnapshots() { // Then we'll validate all the results and life 
will be great final CyclicBarrier barrier = new CyclicBarrier(10); final CountDownLatch latch = new CountDownLatch(10); + final ExecutionContext executionContext = ExecutionContext.getContext(); for (int ii = 0; ii < 10; ii++) { final int threadNo = ii; threads[ii] = new Thread(() -> { - try { + try (final SafeCloseable ignored = executionContext.open()) { barrier.await(); results[threadNo] = InitialSnapshotTable.setupInitialSnapshotTable(expected, diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java index 2edba986263..f563ef28004 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageStreamGeneratorImpl.java @@ -24,7 +24,6 @@ import io.deephaven.configuration.Configuration; import io.deephaven.engine.rowset.*; import io.deephaven.engine.rowset.impl.ExternalizableRowSetUtils; -import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; import io.deephaven.extensions.barrage.util.BarrageProtoUtil.ExposedByteArrayOutputStream; diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java index 3b04835b952..459079cd576 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/BarrageSubscriptionOptions.java @@ -60,8 +60,8 @@ public boolean useDeephavenNulls() { * * Related, when shortening the minUpdateInterval, you typically want to shorten the server's UGP cycle enough to * update at least as 
quickly. This can be done on the server with the flag - * {@code io.deephaven.engine.updategraph.UpdateGraphProcessor#defaultTargetCycleTime}, or - * {@code -DUpdateGraphProcessor.targetcycletime=1000}. + * {@code io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph#defaultTargetCycleTime}, or + * {@code -DPeriodicUpdateGraph.targetcycletime=1000}. * * @return the update interval to subscribe for */ diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java index 17c10121a09..5a39d773c50 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/table/BarrageTable.java @@ -10,6 +10,10 @@ import io.deephaven.chunk.ChunkType; import io.deephaven.chunk.util.pools.ChunkPoolConstants; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.LogicalClock; +import io.deephaven.engine.updategraph.NotificationQueue; +import io.deephaven.engine.updategraph.UpdateSourceRegistrar; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.*; import io.deephaven.engine.table.impl.QueryTable; @@ -22,10 +26,7 @@ import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.engine.table.impl.util.LongColumnSourceWritableRowRedirection; import io.deephaven.engine.table.impl.util.WritableRowRedirection; -import io.deephaven.engine.updategraph.LogicalClock; -import io.deephaven.engine.updategraph.NotificationQueue; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; -import io.deephaven.engine.updategraph.UpdateSourceRegistrar; +import io.deephaven.engine.updategraph.*; import io.deephaven.extensions.barrage.BarragePerformanceLog; import io.deephaven.extensions.barrage.BarrageSubscriptionPerformanceLogger; import 
io.deephaven.internal.log.LoggerFactory; @@ -157,7 +158,7 @@ protected BarrageTable(final UpdateSourceRegistrar registrar, } // we always start empty, and can be notified this cycle if we are refreshed - final long currentClockValue = LogicalClock.DEFAULT.currentValue(); + final long currentClockValue = getUpdateGraph().clock().currentValue(); setLastNotificationStep(LogicalClock.getState(currentClockValue) == LogicalClock.State.Updating ? LogicalClock.getStep(currentClockValue) - 1 : LogicalClock.getStep(currentClockValue)); @@ -368,8 +369,8 @@ public static BarrageTable make( final TableDefinition tableDefinition, final Map attributes, final long initialViewPortRows) { - return make(UpdateGraphProcessor.DEFAULT, UpdateGraphProcessor.DEFAULT, executorService, tableDefinition, - attributes, initialViewPortRows); + final UpdateGraph ug = ExecutionContext.getContext().getUpdateGraph(); + return make(ug, ug, executorService, tableDefinition, attributes, initialViewPortRows); } @VisibleForTesting @@ -459,7 +460,7 @@ protected void saveForDebugging(final BarrageMessage snapshotOrDelta) { processedStep.remove(0); } processedData.add(snapshotOrDelta.clone()); - processedStep.add(LogicalClock.DEFAULT.currentStep()); + processedStep.add(getUpdateGraph().clock().currentStep()); } protected boolean maybeEnablePrevTracking() { diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java index 23c31351432..d21ad80f259 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/ArrowToTableConverter.java @@ -14,7 +14,7 @@ import io.deephaven.engine.rowset.RowSetShiftData; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.util.BarrageMessage; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import io.deephaven.extensions.barrage.chunk.ChunkInputStreamGenerator; import io.deephaven.extensions.barrage.table.BarrageTable; @@ -118,8 +118,9 @@ public synchronized void onCompleted() throws InterruptedException { } final Condition completedCondition; - if (UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { - completedCondition = UpdateGraphProcessor.DEFAULT.exclusiveLock().newCondition(); + final UpdateGraph updateGraph = resultTable.getUpdateGraph(); + if (updateGraph.exclusiveLock().isHeldByCurrentThread()) { + completedCondition = updateGraph.exclusiveLock().newCondition(); } else { completedCondition = null; } @@ -148,7 +149,8 @@ public synchronized void onCompleted() throws InterruptedException { private void signalCompletion(final Condition completedCondition) { if (completedCondition != null) { - UpdateGraphProcessor.DEFAULT.requestSignal(completedCondition); + UpdateGraph updateGraph = resultTable.getUpdateGraph(); + updateGraph.requestSignal(completedCondition); } else { synchronized (ArrowToTableConverter.this) { ArrowToTableConverter.this.notifyAll(); diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java index cf51e23c16c..f62e3cdadb6 100644 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageStreamReader.java @@ -28,7 +28,6 @@ import org.apache.arrow.flatbuf.Message; import org.apache.arrow.flatbuf.MessageHeader; import org.apache.arrow.flatbuf.RecordBatch; -import org.apache.commons.lang3.mutable.MutableInt; import java.io.IOException; import java.io.InputStream; diff --git 
a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java index 33e07d17ad9..9c578a817a6 100755 --- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java +++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/util/BarrageUtil.java @@ -22,7 +22,7 @@ import io.deephaven.engine.table.impl.BaseTable; import io.deephaven.engine.table.impl.remote.ConstructSnapshot; import io.deephaven.engine.table.impl.util.BarrageMessage; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.extensions.barrage.BarragePerformanceLog; import io.deephaven.extensions.barrage.BarrageSnapshotOptions; import io.deephaven.extensions.barrage.BarrageStreamGenerator; @@ -599,7 +599,7 @@ private static Field arrowFieldForVectorType( public static void createAndSendStaticSnapshot( BarrageStreamGenerator.Factory streamGeneratorFactory, - BaseTable table, + BaseTable table, BitSet columns, RowSet viewport, boolean reverseViewport, @@ -672,8 +672,9 @@ public static void createAndSendStaticSnapshot( // very simplistic logic to take the last snapshot and extrapolate max // number of rows that will not exceed the target UGP processing time // percentage + PeriodicUpdateGraph updateGraph = table.getUpdateGraph().cast(); long targetNanos = (long) (TARGET_SNAPSHOT_PERCENTAGE - * UpdateGraphProcessor.DEFAULT.getTargetCycleDurationMillis() + * updateGraph.getTargetCycleDurationMillis() * 1000000); long nanosPerCell = elapsed / (msg.rowsIncluded.size() * columnCount); @@ -697,7 +698,7 @@ public static void createAndSendStaticSnapshot( public static void createAndSendSnapshot( BarrageStreamGenerator.Factory streamGeneratorFactory, - BaseTable table, + BaseTable table, BitSet columns, RowSet viewport, boolean reverseViewport, BarrageSnapshotOptions 
snapshotRequestOptions, StreamObserver listener, diff --git a/extensions/csv/src/test/java/io/deephaven/csv/DeephavenCsvTest.java b/extensions/csv/src/test/java/io/deephaven/csv/DeephavenCsvTest.java index 05aec1ceb80..87c845b286e 100644 --- a/extensions/csv/src/test/java/io/deephaven/csv/DeephavenCsvTest.java +++ b/extensions/csv/src/test/java/io/deephaven/csv/DeephavenCsvTest.java @@ -5,8 +5,10 @@ import io.deephaven.csv.util.CsvReaderException; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.TableTools; import org.apache.commons.io.input.ReaderInputStream; +import org.junit.Rule; import org.junit.Test; import java.io.StringReader; @@ -19,6 +21,9 @@ public class DeephavenCsvTest { + @Rule + public final EngineCleanup base = new EngineCleanup(); + @Test public void instantCustomTimezone() throws CsvReaderException { final ZoneId nycId = ZoneId.of("America/New_York"); diff --git a/extensions/csv/src/test/java/io/deephaven/csv/TestCsvTools.java b/extensions/csv/src/test/java/io/deephaven/csv/TestCsvTools.java index 073af113f87..0dbeb56e995 100644 --- a/extensions/csv/src/test/java/io/deephaven/csv/TestCsvTools.java +++ b/extensions/csv/src/test/java/io/deephaven/csv/TestCsvTools.java @@ -10,16 +10,14 @@ import io.deephaven.engine.table.impl.DataAccessHelpers; import io.deephaven.engine.table.impl.InMemoryTable; import io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.test.types.OutOfBandTest; import io.deephaven.time.DateTimeUtils; import io.deephaven.util.QueryConstants; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.apache.commons.compress.archivers.tar.TarConstants; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.*; import 
org.junit.experimental.categories.Category; import java.io.*; @@ -38,6 +36,9 @@ @Category({OutOfBandTest.class}) public class TestCsvTools { + @Rule + public final EngineCleanup base = new EngineCleanup(); + private File tmpDir; @Before diff --git a/extensions/jdbc/src/main/java/io/deephaven/jdbc/JdbcToTableAdapter.java b/extensions/jdbc/src/main/java/io/deephaven/jdbc/JdbcToTableAdapter.java index ec672ab8a4f..11d6df9995c 100644 --- a/extensions/jdbc/src/main/java/io/deephaven/jdbc/JdbcToTableAdapter.java +++ b/extensions/jdbc/src/main/java/io/deephaven/jdbc/JdbcToTableAdapter.java @@ -21,6 +21,8 @@ import io.deephaven.engine.table.impl.sources.ChunkedBackingStoreExposedWritableSource; import io.deephaven.engine.table.impl.sources.InMemoryColumnSource; import io.deephaven.time.DateTimeUtils; +import io.deephaven.util.SafeCloseable; +import io.deephaven.util.SafeCloseableList; import io.deephaven.util.datastructures.LongSizedDataStructure; import org.jetbrains.annotations.NotNull; @@ -221,55 +223,51 @@ public static Table readJdbc(final ResultSet rs, final ReadJdbcOptions options, final SourceFiller[] sourceFillers = new SourceFiller[numColumns]; final HashMap> columnMap = new LinkedHashMap<>(); - for (int ii = 0; ii < numColumns; ++ii) { - final int columnIndex = rs.findColumn(origColumnNames[ii]); - final String columnName = columnNames[ii]; - final Class destType = options.targetTypeMap.get(columnName); - - final JdbcTypeMapper.DataTypeMapping typeMapping = - JdbcTypeMapper.getColumnTypeMapping(rs, columnIndex, destType); - - final Class deephavenType = typeMapping.getDeephavenType(); - final Class componentType = deephavenType.getComponentType(); - final WritableColumnSource cs = numRows == 0 - ? 
ArrayBackedColumnSource.getMemoryColumnSource(0, deephavenType, componentType) - : InMemoryColumnSource.getImmutableMemoryColumnSource(numRows, deephavenType, componentType); - - if (numRows > 0) { - cs.ensureCapacity(numRows, false); - } + long numRowsRead = 0; + try (final SafeCloseableList toClose = new SafeCloseableList()) { + for (int ii = 0; ii < numColumns; ++ii) { + final int columnIndex = rs.findColumn(origColumnNames[ii]); + final String columnName = columnNames[ii]; + final Class destType = options.targetTypeMap.get(columnName); + + final JdbcTypeMapper.DataTypeMapping typeMapping = + JdbcTypeMapper.getColumnTypeMapping(rs, columnIndex, destType); + + final Class deephavenType = typeMapping.getDeephavenType(); + final Class componentType = deephavenType.getComponentType(); + final WritableColumnSource cs = numRows == 0 + ? ArrayBackedColumnSource.getMemoryColumnSource(0, deephavenType, componentType) + : InMemoryColumnSource.getImmutableMemoryColumnSource(numRows, deephavenType, componentType); + + if (numRows > 0) { + cs.ensureCapacity(numRows, false); + } - if (ChunkedBackingStoreExposedWritableSource.exposesChunkedBackingStore(cs)) { - sourceFillers[ii] = new BackingStoreSourceFiller(columnIndex, typeMapping, cs); - } else { - sourceFillers[ii] = new ChunkFlushingSourceFiller(columnIndex, typeMapping, cs); - } + if (ChunkedBackingStoreExposedWritableSource.exposesChunkedBackingStore(cs)) { + sourceFillers[ii] = toClose.add(new BackingStoreSourceFiller(columnIndex, typeMapping, cs)); + } else { + sourceFillers[ii] = toClose.add(new ChunkFlushingSourceFiller(columnIndex, typeMapping, cs)); + } - columnMap.put(columnName, cs); - } + columnMap.put(columnName, cs); + } - final JdbcTypeMapper.Context context = JdbcTypeMapper.Context.of( - options.sourceTimeZone, options.arrayDelimiter, options.strict); + final JdbcTypeMapper.Context context = JdbcTypeMapper.Context.of( + options.sourceTimeZone, options.arrayDelimiter, options.strict); - long numRowsRead = 
0; - while (rs.next() && (options.maxRows == -1 || numRowsRead < options.maxRows)) { - for (SourceFiller filler : sourceFillers) { - filler.readRow(rs, context, numRowsRead); + while (rs.next() && (options.maxRows == -1 || numRowsRead < options.maxRows)) { + for (SourceFiller filler : sourceFillers) { + filler.readRow(rs, context, numRowsRead); + } + ++numRowsRead; } - ++numRowsRead; - } - - for (SourceFiller filler : sourceFillers) { - filler.close(); } return new QueryTable(RowSetFactory.flat(numRowsRead).toTracking(), columnMap); } - private interface SourceFiller { + private interface SourceFiller extends SafeCloseable { void readRow(ResultSet rs, JdbcTypeMapper.Context context, long destRowKey) throws SQLException; - - void close(); } private static class BackingStoreSourceFiller implements SourceFiller { diff --git a/extensions/jdbc/src/test/java/io/deephaven/jdbc/JdbcToTableAdapterTest.java b/extensions/jdbc/src/test/java/io/deephaven/jdbc/JdbcToTableAdapterTest.java index a1afaf6007d..d301bf9d153 100644 --- a/extensions/jdbc/src/test/java/io/deephaven/jdbc/JdbcToTableAdapterTest.java +++ b/extensions/jdbc/src/test/java/io/deephaven/jdbc/JdbcToTableAdapterTest.java @@ -5,16 +5,14 @@ import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.Table; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.TableTools; import io.deephaven.time.DateTimeFormatter; import io.deephaven.time.DateTimeFormatters; import io.deephaven.time.DateTimeUtils; import io.deephaven.util.QueryConstants; import io.deephaven.util.function.ThrowingRunnable; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.*; import java.math.BigDecimal; import java.sql.Connection; @@ -31,6 +29,9 @@ public class JdbcToTableAdapterTest { + @Rule + public final EngineCleanup framework = new EngineCleanup(); + private static final ZoneId TZ_UTC = ZoneId.of("UTC"); private 
Connection conn; diff --git a/extensions/kafka/src/main/java/io/deephaven/kafka/KafkaTools.java b/extensions/kafka/src/main/java/io/deephaven/kafka/KafkaTools.java index 26646ac88cd..3a006226b36 100644 --- a/extensions/kafka/src/main/java/io/deephaven/kafka/KafkaTools.java +++ b/extensions/kafka/src/main/java/io/deephaven/kafka/KafkaTools.java @@ -17,6 +17,7 @@ import io.deephaven.UncheckedDeephavenException; import io.deephaven.annotations.SimpleStyle; import io.deephaven.base.Pair; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.RowSetShiftData; @@ -29,7 +30,7 @@ import io.deephaven.engine.table.impl.partitioned.PartitionedTableImpl; import io.deephaven.engine.table.impl.sources.ArrayBackedColumnSource; import io.deephaven.engine.table.impl.sources.ring.RingTableTools; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.updategraph.UpdateSourceCombiner; import io.deephaven.engine.updategraph.UpdateSourceRegistrar; import io.deephaven.kafka.KafkaTools.TableType.Append; @@ -1331,7 +1332,7 @@ private static class TableResultFactory implements ResultFactory

{ @Override public UpdateSourceRegistrar getSourceRegistrar() { - return UpdateGraphProcessor.DEFAULT; + return ExecutionContext.getContext().getUpdateGraph(); } @Override @@ -1354,7 +1355,8 @@ public Pair> makeResultAndConsumerFactor private static class PartitionedTableResultFactory implements ResultFactory { - private final UpdateSourceCombiner refreshCombiner = new UpdateSourceCombiner(); + private final UpdateSourceCombiner refreshCombiner = + new UpdateSourceCombiner(ExecutionContext.getContext().getUpdateGraph()); @Override public UpdateSourceRegistrar getSourceRegistrar() { @@ -1618,10 +1620,10 @@ public static Runnable produceFromTable( @NotNull final Produce.KeyOrValueSpec valueSpec, final boolean lastByKeyColumns) { if (table.isRefreshing() - && !UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread() - && !UpdateGraphProcessor.DEFAULT.sharedLock().isHeldByCurrentThread()) { + && !table.getUpdateGraph().exclusiveLock().isHeldByCurrentThread() + && !table.getUpdateGraph().sharedLock().isHeldByCurrentThread()) { throw new KafkaPublisherException( - "Calling thread must hold an exclusive or shared UpdateGraphProcessor lock to publish live sources"); + "Calling thread must hold an exclusive or shared UpdateGraph lock to publish live sources"); } final boolean ignoreKey = keySpec.dataFormat() == DataFormat.IGNORE; @@ -1713,7 +1715,7 @@ private static String getSerializerNameForSimpleSpec( } /** - * @implNote The constructor publishes {@code this} to the {@link UpdateGraphProcessor} and cannot be subclassed. + * @implNote The constructor publishes {@code this} to the {@link UpdateGraph} and cannot be subclassed. */ private static final class StreamPartitionedTable extends PartitionedTableImpl implements Runnable { @@ -1740,7 +1742,8 @@ private StreamPartitionedTable( (WritableColumnSource
) table().getColumnSource(CONSTITUENT_COLUMN_NAME, Table.class); manage(refreshCombiner); refreshCombiner.addSource(this); - UpdateGraphProcessor.DEFAULT.addSource(refreshCombiner); + UpdateGraph updateGraph = table().getUpdateGraph(); + updateGraph.addSource(refreshCombiner); } @Override diff --git a/extensions/kafka/src/main/java/io/deephaven/kafka/publish/PublishToKafka.java b/extensions/kafka/src/main/java/io/deephaven/kafka/publish/PublishToKafka.java index 91bb012b426..77dd6c45781 100644 --- a/extensions/kafka/src/main/java/io/deephaven/kafka/publish/PublishToKafka.java +++ b/extensions/kafka/src/main/java/io/deephaven/kafka/publish/PublishToKafka.java @@ -9,7 +9,7 @@ import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableUpdate; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.liveness.LivenessArtifact; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.table.impl.*; @@ -54,12 +54,11 @@ public class PublishToKafka extends LivenessArtifact { * The new publisher will produce records for existing {@code table} data at construction. *

* If {@code table} is a dynamic, refreshing table ({@link Table#isRefreshing()}), the calling thread must block the - * {@link UpdateGraphProcessor#DEFAULT UpdateGraphProcessor} by holding either its - * {@link UpdateGraphProcessor#exclusiveLock() exclusive lock} or its {@link UpdateGraphProcessor#sharedLock() - * shared lock}. The publisher will install a listener in order to produce new records as updates become available. - * Callers must be sure to maintain a reference to the publisher and ensure that it remains - * {@link io.deephaven.engine.liveness.LivenessReferent live}. The easiest way to do this may be to construct the - * publisher enclosed by a {@link io.deephaven.engine.liveness.LivenessScope liveness scope} with + * {@link UpdateGraph update graph} by holding either its {@link UpdateGraph#exclusiveLock() exclusive lock} or its + * {@link UpdateGraph#sharedLock() shared lock}. The publisher will install a listener in order to produce new + * records as updates become available. Callers must be sure to maintain a reference to the publisher and ensure + * that it remains {@link io.deephaven.engine.liveness.LivenessReferent live}. The easiest way to do this may be to + * construct the publisher enclosed by a {@link io.deephaven.engine.liveness.LivenessScope liveness scope} with * {@code enforceStrongReachability} specified as {@code true}, and {@link LivenessScope#release() release} the * scope when publication is no longer needed. 
For example: * diff --git a/extensions/parquet/benchmark/build.gradle b/extensions/parquet/benchmark/build.gradle index c0b41bf4226..24b46ebb280 100644 --- a/extensions/parquet/benchmark/build.gradle +++ b/extensions/parquet/benchmark/build.gradle @@ -58,7 +58,6 @@ task jmhRun(type: JavaExec) { '-Dconfiguration.quiet=true', '-Djava.awt.headless=true', '-DQueryTable.memoizeResults=false', - '-DUpdateGraphProcessor.checkTableOperations=false', '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=9500' } @@ -77,7 +76,6 @@ def createJmhTask = { '-Dconfiguration.quiet=true', '-Djava.awt.headless=true', '-DQueryTable.memoizeResults=false', - '-DUpdateGraphProcessor.checkTableOperations=false', "-Xmx$heapSize" //'-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=9501' ] diff --git a/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTools.java b/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTools.java index 883819a93ae..f5e0e697def 100644 --- a/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTools.java +++ b/extensions/parquet/table/src/main/java/io/deephaven/parquet/table/ParquetTools.java @@ -8,11 +8,11 @@ import io.deephaven.base.FileUtils; import io.deephaven.base.Pair; import io.deephaven.base.verify.Require; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.table.impl.locations.util.TableDataRefreshService; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.vector.*; import io.deephaven.stringset.StringSet; @@ -484,7 +484,7 @@ public static Table readPartitionedTable( : "Read multiple parquet files with " + locationKeyFinder, RegionedTableComponentFactoryImpl.INSTANCE, locationProvider, - readInstructions.isRefreshing() ? 
UpdateGraphProcessor.DEFAULT : null); + readInstructions.isRefreshing() ? ExecutionContext.getContext().getUpdateGraph() : null); } /** diff --git a/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetGrouping.java b/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetGrouping.java index 53590a19730..1daa67cffd9 100644 --- a/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetGrouping.java +++ b/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetGrouping.java @@ -4,34 +4,19 @@ package io.deephaven.parquet.table; import io.deephaven.base.FileUtils; -import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.ColumnDefinition; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.TableDefinition; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.rowset.RowSetFactory; -import io.deephaven.util.SafeCloseable; -import junit.framework.TestCase; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.util.Map; -public class TestParquetGrouping extends TestCase { - private SafeCloseable executionContext; - - @Override - protected void setUp() throws Exception { - super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - executionContext.close(); - } +public class TestParquetGrouping extends RefreshingTableTestCase { public void testOverflow() throws IOException { // TODO: Figure out why this is called testOverflow diff --git a/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetTools.java b/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetTools.java index 9c94ef723d1..ba2741bdadf 100644 --- 
a/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetTools.java +++ b/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestParquetTools.java @@ -13,7 +13,6 @@ import io.deephaven.engine.table.impl.InMemoryTable; import io.deephaven.engine.table.impl.locations.TableDataException; import io.deephaven.engine.testutil.junit4.EngineCleanup; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.TableTools; import io.deephaven.parquet.table.layout.ParquetKeyValuePartitionedLayout; import io.deephaven.stringset.HashStringSet; @@ -48,15 +47,15 @@ public class TestParquetTools { @Rule public final EngineCleanup framework = new EngineCleanup(); + private Table table1; + private Table emptyTable; + private Table brokenTable; + private String testRoot; private File testRootFile; - private static Table table1; - private static Table emptyTable; - private static Table brokenTable; - - @BeforeClass - public static void setUpFirst() { + @Before + public void setUp() throws IOException { table1 = new InMemoryTable( new String[] {"StringKeys", "GroupedInts"}, new Object[] { @@ -70,19 +69,9 @@ public static void setUpFirst() { new byte[] {} }); brokenTable = (Table) Proxy.newProxyInstance(Table.class.getClassLoader(), new Class[] {Table.class}, - new InvocationHandler() { - @Override - public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { - throw new UnsupportedOperationException("This table is broken!"); - } + (proxy, method, args) -> { + throw new UnsupportedOperationException("This table is broken!"); }); - } - - @Before - public void setUp() throws IOException { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - testRootFile = Files.createTempDirectory(TestParquetTools.class.getName()).toFile(); testRoot = testRootFile.toString(); } diff --git 
a/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestSymbolTableSource.java b/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestSymbolTableSource.java index 837867a640c..3c0a52d6b36 100644 --- a/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestSymbolTableSource.java +++ b/extensions/parquet/table/src/test/java/io/deephaven/parquet/table/TestSymbolTableSource.java @@ -3,12 +3,10 @@ import io.deephaven.base.FileUtils; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.sources.regioned.SymbolTableSource; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.TableTools; import io.deephaven.engine.util.file.TrackedFileHandleFactory; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.*; import java.io.File; import java.io.IOException; @@ -20,6 +18,10 @@ * Unit tests for Parquet symbol tables */ public class TestSymbolTableSource { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + private File dataDirectory; @Before diff --git a/extensions/source-support/src/test/java/io/deephaven/generic/region/AppendOnlyFixedSizePageRegionTest.java b/extensions/source-support/src/test/java/io/deephaven/generic/region/AppendOnlyFixedSizePageRegionTest.java index 9b3b1de151b..b7ccdbec077 100644 --- a/extensions/source-support/src/test/java/io/deephaven/generic/region/AppendOnlyFixedSizePageRegionTest.java +++ b/extensions/source-support/src/test/java/io/deephaven/generic/region/AppendOnlyFixedSizePageRegionTest.java @@ -4,6 +4,7 @@ import io.deephaven.base.verify.Assert; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.attributes.Values; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.RowSequence; import io.deephaven.engine.rowset.RowSequenceFactory; import io.deephaven.engine.table.*; @@ -13,11 +14,11 @@ import 
io.deephaven.engine.table.impl.select.SimulationClock; import io.deephaven.engine.table.impl.sources.ReinterpretUtils; import io.deephaven.engine.table.impl.sources.regioned.*; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.updategraph.AbstractNotification; import io.deephaven.engine.updategraph.NotificationQueue; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.UpdateSourceRegistrar; import io.deephaven.engine.util.TableTools; import io.deephaven.io.log.impl.LogOutputStringImpl; @@ -50,9 +51,9 @@ public void testCorrectness() { final Instant endTime = DateTimeUtils.plus(startTime, 1_000_000_000L); final SimulationClock clock = new SimulationClock(startTime, endTime, 100_000_000L); final TimeTable[] timeTables = new TimeTable[] { - new TimeTable(UpdateGraphProcessor.DEFAULT, clock, startTime, 1000, false), - new TimeTable(UpdateGraphProcessor.DEFAULT, clock, startTime, 10000, false), - new TimeTable(UpdateGraphProcessor.DEFAULT, clock, startTime, 100000, false) + new TimeTable(ExecutionContext.getContext().getUpdateGraph(), clock, startTime, 1000, false), + new TimeTable(ExecutionContext.getContext().getUpdateGraph(), clock, startTime, 10000, false), + new TimeTable(ExecutionContext.getContext().getUpdateGraph(), clock, startTime, 100000, false) }; final Table[] withTypes = addTypes(timeTables); final DependentRegistrar dependentRegistrar = new DependentRegistrar(withTypes); @@ -61,8 +62,9 @@ public void testCorrectness() { System.out.println("Initial start time: " + clock.instantNanos()); TstUtils.assertTableEquals(expected, actual); clock.start(); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); while (!clock.done()) { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> 
{ clock.advance(); for (final TimeTable timeTable : timeTables) { timeTable.run(); @@ -116,7 +118,7 @@ private static final class DependentRegistrar implements UpdateSourceRegistrar, private DependentRegistrar(@NotNull final NotificationQueue.Dependency... dependencies) { this.dependencies = dependencies; - UpdateGraphProcessor.DEFAULT.addSource(this); + ExecutionContext.getContext().getUpdateGraph().addSource(this); } @Override @@ -131,12 +133,12 @@ public synchronized void removeSource(@NotNull final Runnable updateSource) { @Override public void requestRefresh() { - UpdateGraphProcessor.DEFAULT.requestRefresh(); + ExecutionContext.getContext().getUpdateGraph().requestRefresh(); } @Override public void run() { - UpdateGraphProcessor.DEFAULT.addNotification(new AbstractNotification(false) { + ExecutionContext.getContext().getUpdateGraph().addNotification(new AbstractNotification(false) { @Override public boolean canExecute(final long step) { synchronized (DependentRegistrar.this) { diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java index 9a55c0443c5..c44535cd1ec 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSnapshotImpl.java @@ -15,13 +15,11 @@ import io.deephaven.engine.table.TableDefinition; import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.engine.table.impl.util.BarrageMessage.Listener; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.extensions.barrage.BarrageSnapshotOptions; import io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.extensions.barrage.util.*; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; -import io.deephaven.tablelogger.Row; import io.grpc.CallOptions; import 
io.grpc.ClientCall; import io.grpc.Context; @@ -180,15 +178,15 @@ public synchronized BarrageTable partialTable(RowSet viewport, BitSet columns, b } // test lock conditions - if (UpdateGraphProcessor.DEFAULT.sharedLock().isHeldByCurrentThread()) { + if (resultTable.getUpdateGraph().sharedLock().isHeldByCurrentThread()) { throw new UnsupportedOperationException( - "Cannot snapshot while holding the UpdateGraphProcessor shared lock"); + "Cannot snapshot while holding the UpdateGraph shared lock"); } prevUsed = true; - if (UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { - completedCondition = UpdateGraphProcessor.DEFAULT.exclusiveLock().newCondition(); + if (resultTable.getUpdateGraph().exclusiveLock().isHeldByCurrentThread()) { + completedCondition = resultTable.getUpdateGraph().exclusiveLock().newCondition(); } if (!connected) { @@ -244,7 +242,7 @@ private void handleDisconnect() { private void signalCompletion() { if (completedCondition != null) { - UpdateGraphProcessor.DEFAULT.requestSignal(completedCondition); + resultTable.getUpdateGraph().requestSignal(completedCondition); } else { synchronized (BarrageSnapshotImpl.this) { BarrageSnapshotImpl.this.notifyAll(); diff --git a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java index 3db286f157b..40be5cb50c7 100644 --- a/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java +++ b/java-client/barrage/src/main/java/io/deephaven/client/impl/BarrageSubscriptionImpl.java @@ -18,7 +18,6 @@ import io.deephaven.engine.table.impl.InstrumentedTableUpdateListener; import io.deephaven.engine.table.impl.util.BarrageMessage; import io.deephaven.engine.table.impl.util.BarrageMessage.Listener; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.extensions.barrage.BarrageSubscriptionOptions; import 
io.deephaven.extensions.barrage.table.BarrageTable; import io.deephaven.extensions.barrage.util.*; @@ -194,13 +193,13 @@ public synchronized BarrageTable partialTable(RowSet viewport, BitSet columns, b "BarrageSubscription objects cannot be reused."); } else { // test lock conditions - if (UpdateGraphProcessor.DEFAULT.sharedLock().isHeldByCurrentThread()) { + if (resultTable.getUpdateGraph().sharedLock().isHeldByCurrentThread()) { throw new UnsupportedOperationException( - "Cannot create subscription while holding the UpdateGraphProcessor shared lock"); + "Cannot create subscription while holding the UpdateGraph shared lock"); } - if (UpdateGraphProcessor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { - completedCondition = UpdateGraphProcessor.DEFAULT.exclusiveLock().newCondition(); + if (resultTable.getUpdateGraph().exclusiveLock().isHeldByCurrentThread()) { + completedCondition = resultTable.getUpdateGraph().exclusiveLock().newCondition(); } // Send the initial subscription: @@ -230,7 +229,7 @@ protected void destroy() { protected void onFailureInternal(final Throwable originalException, final Entry sourceEntry) { exceptionWhileCompleting = originalException; if (completedCondition != null) { - UpdateGraphProcessor.DEFAULT.requestSignal(completedCondition); + resultTable.getUpdateGraph().requestSignal(completedCondition); } else { synchronized (BarrageSubscriptionImpl.this) { BarrageSubscriptionImpl.this.notifyAll(); @@ -294,7 +293,7 @@ public void onUpdate(final TableUpdate upstream) { private void signalCompletion() { completed = true; if (completedCondition != null) { - UpdateGraphProcessor.DEFAULT.requestSignal(completedCondition); + resultTable.getUpdateGraph().requestSignal(completedCondition); } else { synchronized (BarrageSubscriptionImpl.this) { BarrageSubscriptionImpl.this.notifyAll(); diff --git a/java-client/flight-dagger/build.gradle b/java-client/flight-dagger/build.gradle index aeef6e62fc4..d143bac5742 100644 --- 
a/java-client/flight-dagger/build.gradle +++ b/java-client/flight-dagger/build.gradle @@ -29,6 +29,6 @@ dependencies { testImplementation project(':log-to-slf4j') } -test.systemProperty "UpdateGraphProcessor.allowUnitTestMode", false +test.systemProperty "PeriodicUpdateGraph.allowUnitTestMode", false apply plugin: 'io.deephaven.java-open-nio' diff --git a/java-client/flight-dagger/src/test/java/io/deephaven/client/DeephavenFlightSessionTestBase.java b/java-client/flight-dagger/src/test/java/io/deephaven/client/DeephavenFlightSessionTestBase.java index 3f78af44c0a..3d24c941b6d 100644 --- a/java-client/flight-dagger/src/test/java/io/deephaven/client/DeephavenFlightSessionTestBase.java +++ b/java-client/flight-dagger/src/test/java/io/deephaven/client/DeephavenFlightSessionTestBase.java @@ -20,7 +20,6 @@ public abstract class DeephavenFlightSessionTestBase extends DeephavenApiServerTestBase { - SafeCloseable executionContext; BufferAllocator bufferAllocator; ScheduledExecutorService sessionScheduler; FlightSession flightSession; @@ -29,7 +28,6 @@ public abstract class DeephavenFlightSessionTestBase extends DeephavenApiServerT @Before public void setUp() throws Exception { super.setUp(); - executionContext = TestExecutionContext.createForUnitTests().open(); ManagedChannel channel = channelBuilder().build(); register(channel); sessionScheduler = Executors.newScheduledThreadPool(2); @@ -48,7 +46,6 @@ public void tearDown() throws Exception { if (!sessionScheduler.awaitTermination(5, TimeUnit.SECONDS)) { throw new RuntimeException("Scheduler not shutdown within 5 seconds"); } - executionContext.close(); super.tearDown(); } } diff --git a/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy b/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy index 1634a09023a..89cf1c7a665 100644 --- 
a/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy +++ b/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/ColumnExpressionCompletionHandlerTest.groovy @@ -1,7 +1,7 @@ package io.deephaven.lang.completion -import io.deephaven.base.clock.Clock; -import io.deephaven.engine.context.TestExecutionContext +import io.deephaven.base.clock.Clock +import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.table.Table import io.deephaven.engine.table.TableDefinition import io.deephaven.engine.util.VariableProvider diff --git a/plugin/gc-app/src/main/java/io/deephaven/app/GcApplication.java b/plugin/gc-app/src/main/java/io/deephaven/app/GcApplication.java index 51a68c6e13a..3a2daf65cb7 100644 --- a/plugin/gc-app/src/main/java/io/deephaven/app/GcApplication.java +++ b/plugin/gc-app/src/main/java/io/deephaven/app/GcApplication.java @@ -3,11 +3,11 @@ import com.sun.management.GarbageCollectionNotificationInfo; import io.deephaven.appmode.ApplicationState; import io.deephaven.appmode.ApplicationState.Listener; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.sources.ring.RingTableTools; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.stream.StreamToBlinkTableAdapter; import io.deephaven.util.SafeCloseable; @@ -162,7 +162,7 @@ public ApplicationState create(Listener listener) { private void setNotificationInfo(ApplicationState state) { notificationInfoPublisher = new GcNotificationPublisher(); final StreamToBlinkTableAdapter adapter = new StreamToBlinkTableAdapter(GcNotificationPublisher.definition(), - notificationInfoPublisher, UpdateGraphProcessor.DEFAULT, NOTIFICATION_INFO); + notificationInfoPublisher, ExecutionContext.getContext().getUpdateGraph(), 
NOTIFICATION_INFO); final Table notificationInfo = adapter.table(); state.setField(NOTIFICATION_INFO, notificationInfo); if (notificationInfoStatsEnabled()) { @@ -177,7 +177,7 @@ private void setNotificationInfo(ApplicationState state) { private void setPools(ApplicationState state) { poolsPublisher = new GcPoolsPublisher(); final StreamToBlinkTableAdapter adapter = new StreamToBlinkTableAdapter(GcPoolsPublisher.definition(), - poolsPublisher, UpdateGraphProcessor.DEFAULT, POOLS); + poolsPublisher, ExecutionContext.getContext().getUpdateGraph(), POOLS); final Table pools = adapter.table(); state.setField(POOLS, pools); if (poolStatsEnabled()) { diff --git a/props/configs/src/main/resources/dh-defaults.prop b/props/configs/src/main/resources/dh-defaults.prop index d3dbbbdc2b6..232c7694a98 100644 --- a/props/configs/src/main/resources/dh-defaults.prop +++ b/props/configs/src/main/resources/dh-defaults.prop @@ -4,7 +4,6 @@ UpdatePerformanceTracker.reportIntervalMillis=60000 measurement.per_thread_cpu=false allocation.stats.enabled=false statsdriver.enabled=true -UpdateGraphProcessor.checkTableOperations=true # # NIO driver thread pool diff --git a/props/test-configs/src/main/resources/dh-tests.prop b/props/test-configs/src/main/resources/dh-tests.prop index 8457c2441de..e665197dfca 100644 --- a/props/test-configs/src/main/resources/dh-tests.prop +++ b/props/test-configs/src/main/resources/dh-tests.prop @@ -52,8 +52,7 @@ jpy.jdlLib=/usr/local/lib/python2.7/dist-packages/jdl.so,/Library/Python/2.7/sit Comm.fatalErrorHandlerFactoryClass=io.deephaven.console.utils.ProcessEnvironmentRedirectFactory -UpdateGraphProcessor.checkTableOperations=true -UpdateGraphProcessor.allowUnitTestMode=true +PeriodicUpdateGraph.allowUnitTestMode=true QueryPerformanceRecorder.packageFilter.internal=defaultPackageFilters.qpr diff --git a/py/server/deephaven/execution_context.py b/py/server/deephaven/execution_context.py index 908fe2d7fc4..b1d60776273 100644 --- 
a/py/server/deephaven/execution_context.py +++ b/py/server/deephaven/execution_context.py @@ -16,6 +16,7 @@ from deephaven.jcompat import to_sequence _JExecutionContext = jpy.get_type("io.deephaven.engine.context.ExecutionContext") +_JUpdateGraph = jpy.get_type("io.deephaven.engine.updategraph.UpdateGraph") class ExecutionContext(JObjectWrapper, ContextDecorator): @@ -36,6 +37,10 @@ class ExecutionContext(JObjectWrapper, ContextDecorator): def j_object(self) -> jpy.JType: return self.j_exec_ctx + @property + def update_graph(self) -> _JUpdateGraph: + return self.j_exec_ctx.getUpdateGraph() + def __init__(self, j_exec_ctx): self.j_exec_ctx = j_exec_ctx @@ -73,6 +78,7 @@ def make_user_exec_ctx(freeze_vars: Union[str, Sequence[str]] = None) -> Executi .captureQueryCompiler() .captureQueryLibrary() .captureQueryScopeVars(*freeze_vars) + .captureUpdateGraph() .build()) return ExecutionContext(j_exec_ctx=j_exec_ctx) except Exception as e: diff --git a/py/server/deephaven/experimental/outer_joins.py b/py/server/deephaven/experimental/outer_joins.py index 7395dd21079..f475449cc44 100644 --- a/py/server/deephaven/experimental/outer_joins.py +++ b/py/server/deephaven/experimental/outer_joins.py @@ -13,7 +13,7 @@ from deephaven.table import Table import jpy -from deephaven.ugp import auto_locking_ctx +from deephaven.update_graph import auto_locking_ctx _JOuterJoinTools = jpy.get_type("io.deephaven.engine.util.OuterJoinTools") diff --git a/py/server/deephaven/table.py b/py/server/deephaven/table.py index 9da89ed21c9..94bb111f4b6 100644 --- a/py/server/deephaven/table.py +++ b/py/server/deephaven/table.py @@ -27,7 +27,7 @@ from deephaven.filters import Filter, and_, or_ from deephaven.jcompat import j_unary_operator, j_binary_operator, j_map_to_dict, j_hashmap from deephaven.jcompat import to_sequence, j_array_list -from deephaven.ugp import auto_locking_ctx +from deephaven.update_graph import auto_locking_ctx from deephaven.updateby import UpdateByOperation # Table @@ -42,6 
+42,7 @@ _JLayoutHintBuilder = jpy.get_type("io.deephaven.engine.util.LayoutHintBuilder") _JSearchDisplayMode = jpy.get_type("io.deephaven.engine.util.LayoutHintBuilder$SearchDisplayModes") _JSnapshotWhenOptions = jpy.get_type("io.deephaven.api.snapshot.SnapshotWhenOptions") +_JUpdateGraph = jpy.get_type("io.deephaven.engine.updategraph.UpdateGraph") # PartitionedTable _JPartitionedTable = jpy.get_type("io.deephaven.engine.table.PartitionedTable") @@ -507,6 +508,7 @@ def __init__(self, j_table: jpy.JType): self._definition = self.j_table.getDefinition() self._schema = None self._is_refreshing = None + self._update_graph = None self._is_flat = None def __repr__(self): @@ -537,6 +539,15 @@ def is_refreshing(self) -> bool: self._is_refreshing = self.j_table.isRefreshing() return self._is_refreshing + @property + def update_graph(self) -> _JUpdateGraph: + """None if not refreshing otherwise is this table's update graph.""" + if self.is_refreshing: + if self._update_graph is None: + self._update_graph = self.j_table.getUpdateGraph() + return self._update_graph + return None + @property def is_flat(self) -> bool: """Whether this table is guaranteed to be flat, i.e. 
its row set will be from 0 to number of rows - 1.""" @@ -2298,6 +2309,11 @@ def table(self) -> Table: self._table = Table(j_table=self.j_partitioned_table.table()) return self._table + @property + def update_graph(self) -> _JUpdateGraph: + """The underlying partitioned table's update graph.""" + return self.table.update_graph + @property def is_refreshing(self) -> bool: """Whether the underlying partitioned table is refreshing.""" @@ -2554,6 +2570,11 @@ def is_refreshing(self) -> bool: """Whether this proxy represents a refreshing partitioned table.""" return self.target.is_refreshing + @property + def update_graph(self) -> _JUpdateGraph: + """The underlying partitioned table proxy's update graph.""" + return self.target.update_graph + def __init__(self, j_pt_proxy): self.j_pt_proxy = jpy.cast(j_pt_proxy, _JPartitionedTableProxy) self.require_matching_keys = self.j_pt_proxy.requiresMatchingKeys() diff --git a/py/server/deephaven/table_factory.py b/py/server/deephaven/table_factory.py index 9f9311dfdcf..5c652ec8c19 100644 --- a/py/server/deephaven/table_factory.py +++ b/py/server/deephaven/table_factory.py @@ -14,7 +14,7 @@ from deephaven.dtypes import DType from deephaven.jcompat import to_sequence from deephaven.table import Table -from deephaven.ugp import auto_locking_ctx +from deephaven.update_graph import auto_locking_ctx _JTableFactory = jpy.get_type("io.deephaven.engine.table.TableFactory") _JTableTools = jpy.get_type("io.deephaven.engine.util.TableTools") diff --git a/py/server/deephaven/table_listener.py b/py/server/deephaven/table_listener.py index 03d735cdc37..0148958ff7f 100644 --- a/py/server/deephaven/table_listener.py +++ b/py/server/deephaven/table_listener.py @@ -14,7 +14,7 @@ import numpy from deephaven import DHError -from deephaven import ugp +from deephaven import update_graph from deephaven._wrapper import JObjectWrapper from deephaven.column import Column from deephaven.jcompat import to_sequence @@ -25,6 +25,7 @@ 
_JPythonReplayListenerAdapter = jpy.get_type("io.deephaven.integrations.python.PythonReplayListenerAdapter") _JTableUpdate = jpy.get_type("io.deephaven.engine.table.TableUpdate") _JTableUpdateDataReader = jpy.get_type("io.deephaven.integrations.python.PythonListenerTableUpdateDataReader") +_JUpdateGraph = jpy.get_type("io.deephaven.engine.updategraph.UpdateGraph") def _col_defs(table: Table, cols: Union[str, List[str]]) -> List[Column]: @@ -237,25 +238,29 @@ def modified_columns(self) -> List[str]: return list(cols) if cols else [] -def _do_locked(f: Callable, lock_type="shared") -> None: - """Executes a function while holding the UpdateGraphProcessor (UGP) lock. Holding the UGP lock +def _do_locked(ug: Union[_JUpdateGraph, Table], f: Callable, lock_type="shared") -> None: + """Executes a function while holding the UpdateGraph (UG) lock. Holding the UG lock ensures that the contents of a table will not change during a computation, but holding the lock also prevents table updates from happening. The lock should be held for as little time as possible. Args: - f (Callable): callable to execute while holding the UGP lock, could be function or an object with an 'apply' + ug (Union[_JUpdateGraph, Table]): The Update Graph (UG) or a table-like object. + f (Callable): callable to execute while holding the UG lock, could be function or an object with an 'apply' attribute which is callable - lock_type (str): UGP lock type, valid values are "exclusive" and "shared". "exclusive" allows only a single + lock_type (str): UG lock type, valid values are "exclusive" and "shared". "exclusive" allows only a single reader or writer to hold the lock. "shared" allows multiple readers or a single writer to hold the lock. 
Raises: ValueError """ + if isinstance(ug, Table): + ug = ug.update_graph + if lock_type == "exclusive": - with ugp.exclusive_lock(): + with update_graph.exclusive_lock(ug): f() elif lock_type == "shared": - with ugp.shared_lock(): + with update_graph.shared_lock(ug): f() else: raise ValueError(f"Unsupported lock type: lock_type={lock_type}") @@ -388,7 +393,7 @@ def _start(): self.t.j_table.addUpdateListener(self.listener) if do_replay: - _do_locked(_start, lock_type=replay_lock) + _do_locked(self.t, _start, lock_type=replay_lock) else: _start() except Exception as e: diff --git a/py/server/deephaven/ugp.py b/py/server/deephaven/ugp.py deleted file mode 100644 index 64089512514..00000000000 --- a/py/server/deephaven/ugp.py +++ /dev/null @@ -1,139 +0,0 @@ -# -# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending -# -"""This module provides access to the Update Graph Processor(UGP)'s locks that must be acquired to perform certain -table operations. When working with refreshing tables, UGP locks must be held in order to have a consistent view of -the data between table operations. -""" - -import contextlib -from collections import abc -from functools import wraps -from typing import Callable - -import jpy - -from deephaven import DHError - -_JUpdateGraphProcessor = jpy.get_type("io.deephaven.engine.updategraph.UpdateGraphProcessor") -_j_exclusive_lock = _JUpdateGraphProcessor.DEFAULT.exclusiveLock() -_j_shared_lock = _JUpdateGraphProcessor.DEFAULT.sharedLock() - -auto_locking = True -"""Whether to automatically acquire the Update Graph Processor(UGP) shared lock for an unsafe operation on a refreshing -table when the current thread doesn't own either the UGP shared or the UGP exclusive lock. The newly obtained lock will -be released after the table operation finishes. 
Auto locking is turned on by default.""" - - -def has_exclusive_lock() -> bool: - """Checks if the current thread is holding the Update Graph Processor(UGP) exclusive lock.""" - return _j_exclusive_lock.isHeldByCurrentThread() - - -def has_shared_lock() -> bool: - """Checks if the current thread is holding the Update Graph Processor(UGP) shared lock.""" - return _j_shared_lock.isHeldByCurrentThread() - - -@contextlib.contextmanager -def exclusive_lock(): - """Context manager for running a block of code under a Update Graph Processor(UGP) exclusive lock.""" - _j_exclusive_lock.lock() - try: - yield - except Exception as e: - raise DHError(e, "exception raised in the enclosed code block.") from e - finally: - _j_exclusive_lock.unlock() - - -@contextlib.contextmanager -def shared_lock(): - """Context manager for running a block of code under a Update Graph Processor(UGP) shared lock.""" - _j_shared_lock.lock() - try: - yield - except Exception as e: - raise DHError(e, "exception raised in the enclosed code block.") from e - finally: - _j_shared_lock.unlock() - - -def exclusive_locked(f: Callable) -> Callable: - """A decorator that ensures the decorated function be called under the Update Graph Processor(UGP) exclusive - lock. The lock is released after the function returns regardless of what happens inside the function.""" - - @wraps(f) - def do_locked(*arg, **kwargs): - with exclusive_lock(): - return f(*arg, **kwargs) - - return do_locked - - -def shared_locked(f: Callable) -> Callable: - """A decorator that ensures the decorated function be called under the Update Graph Processor(UGP) shared lock. 
- The lock is released after the function returns regardless of what happens inside the function.""" - - @wraps(f) - def do_locked(*arg, **kwargs): - with shared_lock(): - return f(*arg, **kwargs) - - return do_locked - - -def _is_arg_refreshing(arg): - if isinstance(arg, list) or isinstance(arg, tuple): - for e in arg: - if _is_arg_refreshing(e): - return True - elif getattr(arg, "is_refreshing", False): - return True - - return False - - -def _has_refreshing_tables(*args, **kwargs): - for arg in args: - if _is_arg_refreshing(arg): - return True - for k, v in kwargs.items(): - if _is_arg_refreshing(v): - return True - - return False - - -def auto_locking_op(f: Callable) -> Callable: - """A decorator for annotating unsafe Table operations. It ensures that the decorated function runs under the UGP - shared lock if ugp.auto_locking is True, the target table-like object or any table-like arguments are refreshing, - and the current thread doesn't own any UGP locks.""" - - @wraps(f) - def do_locked(*args, **kwargs): - if (not _has_refreshing_tables(*args, **kwargs) - or not auto_locking - or has_shared_lock() - or has_exclusive_lock()): - return f(*args, **kwargs) - - with shared_lock(): - return f(*args, **kwargs) - - return do_locked - - -@contextlib.contextmanager -def auto_locking_ctx(*args, **kwargs): - """An auto-locking aware context manager. 
It ensures that the enclosed code block runs under the UGP shared lock if - ugp.auto_locking is True, the target table-like object or any table-like arguments are refreshing, and the current - thread doesn't own any UGP locks.""" - if (not _has_refreshing_tables(*args, **kwargs) - or not auto_locking - or has_shared_lock() - or has_exclusive_lock()): - yield - else: - with shared_lock(): - yield diff --git a/py/server/deephaven/update_graph.py b/py/server/deephaven/update_graph.py new file mode 100644 index 00000000000..6658bc60a83 --- /dev/null +++ b/py/server/deephaven/update_graph.py @@ -0,0 +1,222 @@ +# +# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending +# +"""This module provides access to the Update Graph (UG)'s locks that must be acquired to perform certain +table operations. When working with refreshing tables, UG locks must be held in order to have a consistent view of +the data between table operations. +""" + +import contextlib +from functools import wraps +from typing import Callable, Union + +import jpy + +from deephaven import DHError + +_JUpdateGraph = jpy.get_type("io.deephaven.engine.updategraph.UpdateGraph") + +auto_locking = True +"""Whether to automatically acquire the Update Graph (UG) shared lock for an unsafe operation on a refreshing +table when the current thread doesn't own either the UG shared or the UG exclusive lock. The newly obtained lock will +be released after the table operation finishes. Auto locking is turned on by default.""" + + +def has_exclusive_lock(ug: Union[_JUpdateGraph, "Table", "PartitionedTable", "PartitionTableProxy"]) -> bool: + """Checks if the current thread is holding the provided Update Graph's (UG) exclusive lock. + + Args: + ug (Union[_JUpdateGraph, Table, PartitionedTable, PartitionTableProxy]): The Update Graph (UG) or a table-like object. + + Returns: + True if the current thread is holding the Update Graph (UG) exclusive lock, False otherwise. 
+ """ + from deephaven.table import Table, PartitionedTable, PartitionedTableProxy + if isinstance(ug, Table): + ug = ug.update_graph + if isinstance(ug, PartitionedTable): + ug = ug.update_graph + if isinstance(ug, PartitionedTableProxy): + ug = ug.update_graph + + return ug.exclusiveLock().isHeldByCurrentThread() + + +def has_shared_lock(ug: Union[_JUpdateGraph, "Table", "PartitionedTable", "PartitionTableProxy"]) -> bool: + """Checks if the current thread is holding the provided Update Graph's (UG) shared lock. + + Args: + ug (Union[_JUpdateGraph, Table, PartitionedTable, PartitionTableProxy]): The Update Graph (UG) or a table-like object. + + Returns: + True if the current thread is holding the Update Graph (UG) shared lock, False otherwise. + """ + from deephaven.table import Table, PartitionedTable, PartitionedTableProxy + if isinstance(ug, Table): + ug = ug.update_graph + if isinstance(ug, PartitionedTable): + ug = ug.update_graph + if isinstance(ug, PartitionedTableProxy): + ug = ug.update_graph + + return ug.sharedLock().isHeldByCurrentThread() + + +@contextlib.contextmanager +def exclusive_lock(ug: Union[_JUpdateGraph, "Table", "PartitionedTable", "PartitionTableProxy"]): + """Context manager for running a block of code under an Update Graph (UG) exclusive lock. + + Args: + ug (Union[_JUpdateGraph, Table, PartitionedTable, PartitionTableProxy]): The Update Graph (UG) or a table-like object. 
+ """ + from deephaven.table import Table, PartitionedTable, PartitionedTableProxy + if isinstance(ug, Table): + ug = ug.update_graph + if isinstance(ug, PartitionedTable): + ug = ug.update_graph + if isinstance(ug, PartitionedTableProxy): + ug = ug.update_graph + + lock = ug.exclusiveLock() + lock.lock() + try: + yield + except Exception as e: + raise DHError(e, "exception raised in the enclosed code block.") from e + finally: + lock.unlock() + + +@contextlib.contextmanager +def shared_lock(ug: Union[_JUpdateGraph, "Table", "PartitionedTable", "PartitionTableProxy"]): + """Context manager for running a block of code under an Update Graph (UG) shared lock. + + Args: + ug (Union[_JUpdateGraph, Table, PartitionedTable, PartitionTableProxy]): The Update Graph (UG) or a table-like object. + """ + from deephaven.table import Table, PartitionedTable, PartitionedTableProxy + if isinstance(ug, Table): + ug = ug.update_graph + if isinstance(ug, PartitionedTable): + ug = ug.update_graph + if isinstance(ug, PartitionedTableProxy): + ug = ug.update_graph + + lock = ug.sharedLock() + lock.lock() + try: + yield + except Exception as e: + raise DHError(e, "exception raised in the enclosed code block.") from e + finally: + lock.unlock() + + +def exclusive_locked(ug: Union[_JUpdateGraph, "Table", "PartitionedTable", "PartitionTableProxy"]) -> Callable: + """A decorator that ensures the decorated function be called under the Update Graph (UG) exclusive + lock. The lock is released after the function returns regardless of what happens inside the function. + + Args: + ug (Union[_JUpdateGraph, Table, PartitionedTable, PartitionTableProxy]): The Update Graph (UG) or a table-like object. 
+ """ + from deephaven.table import Table, PartitionedTable, PartitionedTableProxy + if isinstance(ug, Table): + ug = ug.update_graph + if isinstance(ug, PartitionedTable): + ug = ug.update_graph + if isinstance(ug, PartitionedTableProxy): + ug = ug.update_graph + + def inner_wrapper(f: Callable) -> Callable: + @wraps(f) + def do_locked(*arg, **kwargs): + with exclusive_lock(ug): + return f(*arg, **kwargs) + + return do_locked + + return inner_wrapper + + +def shared_locked(ug: Union[_JUpdateGraph, "Table", "PartitionedTable", "PartitionTableProxy"]) -> Callable: + """A decorator that ensures the decorated function be called under the Update Graph (UG) shared lock. + The lock is released after the function returns regardless of what happens inside the function. + + Args: + ug (Union[_JUpdateGraph, Table, PartitionedTable, PartitionTableProxy]): The Update Graph (UG) or a table-like object. + """ + from deephaven.table import Table, PartitionedTable, PartitionedTableProxy + if isinstance(ug, Table): + ug = ug.update_graph + if isinstance(ug, PartitionedTable): + ug = ug.update_graph + if isinstance(ug, PartitionedTableProxy): + ug = ug.update_graph + + def inner_wrapper(f: Callable) -> Callable: + @wraps(f) + def do_locked(*arg, **kwargs): + with shared_lock(ug): + return f(*arg, **kwargs) + + return do_locked + + return inner_wrapper + + +def _is_arg_refreshing(arg): + if isinstance(arg, list) or isinstance(arg, tuple): + for e in arg: + if _is_arg_refreshing(e): + return True + elif getattr(arg, "is_refreshing", False): + return True + + return False + + +def _first_refreshing_table(*args, **kwargs): + for arg in args: + if _is_arg_refreshing(arg): + return arg + for k, v in kwargs.items(): + if _is_arg_refreshing(v): + return v + + return None + + +def auto_locking_op(f: Callable) -> Callable: + """A decorator for annotating unsafe Table operations. 
It ensures that the decorated function runs under the UG + shared lock if ugp.auto_locking is True, the target table-like object or any table-like arguments are refreshing, + and the current thread doesn't own any UG locks.""" + + @wraps(f) + def do_locked(*args, **kwargs): + arg = _first_refreshing_table(*args, **kwargs) + if (not arg + or not auto_locking + or has_shared_lock(arg) + or has_exclusive_lock(arg)): + return f(*args, **kwargs) + + with shared_lock(arg.update_graph): + return f(*args, **kwargs) + + return do_locked + + +@contextlib.contextmanager +def auto_locking_ctx(*args, **kwargs): + """An auto-locking aware context manager. It ensures that the enclosed code block runs under the UG shared lock if + ugp.auto_locking is True, the target table-like object or any table-like arguments are refreshing, and the current + thread doesn't own any UG locks.""" + arg = _first_refreshing_table(*args, **kwargs) + if (not arg + or not auto_locking + or has_shared_lock(arg) + or has_exclusive_lock(arg)): + yield + else: + with shared_lock(arg.update_graph): + yield diff --git a/py/server/test_helper/__init__.py b/py/server/test_helper/__init__.py index 9b190c61cac..f99d5127b6f 100644 --- a/py/server/test_helper/__init__.py +++ b/py/server/test_helper/__init__.py @@ -12,7 +12,6 @@ from deephaven_internal import jvm - py_dh_session = None @@ -57,7 +56,6 @@ def start_jvm(jvm_props: Dict[str, str] = None): } jvm_classpath = os.environ.get('DEEPHAVEN_CLASSPATH', '') - # Start up the JVM jpy.VerboseExceptions.enabled = True jvm.init_jvm( @@ -69,7 +67,10 @@ def start_jvm(jvm_props: Dict[str, str] = None): # Set up a Deephaven Python session py_scope_jpy = jpy.get_type("io.deephaven.engine.util.PythonScopeJpyImpl").ofMainGlobals() global py_dh_session - py_dh_session = jpy.get_type("io.deephaven.integrations.python.PythonDeephavenSession")(py_scope_jpy) + _JUpdateGraph = jpy.get_type("io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph") + test_update_graph = 
_JUpdateGraph.newBuilder("PYTHON_TEST").existingOrBuild() + _JPythonScriptSession = jpy.get_type("io.deephaven.integrations.python.PythonDeephavenSession") + py_dh_session = _JPythonScriptSession(test_update_graph, py_scope_jpy) def _expandWildcardsInList(elements): diff --git a/py/server/tests/test_arrow.py b/py/server/tests/test_arrow.py index 5cbfc42d1b1..31f296f3e10 100644 --- a/py/server/tests/test_arrow.py +++ b/py/server/tests/test_arrow.py @@ -21,8 +21,8 @@ class ArrowTestCase(BaseTestCase): test_table: Table - @classmethod - def setUpClass(cls) -> None: + def setUp(self) -> None: + super().setUp() cols = [ bool_col(name="Boolean", data=[True, False]), byte_col(name="Byte", data=(1, -1)), @@ -36,11 +36,11 @@ def setUpClass(cls) -> None: string_col(name="String", data=["foo", "bar"]), datetime_col(name="Datetime", data=[epoch_nanos_to_instant(1), epoch_nanos_to_instant(-1)]), ] - cls.test_table = new_table(cols=cols) + self.test_table = new_table(cols=cols) - @classmethod - def tearDownClass(cls) -> None: - del cls.test_table + def tearDown(self) -> None: + del self.test_table + super().tearDown() def verify_type_conversion(self, pa_types: List[pa.DataType], pa_data: List[Any], cast_for_round_trip: bool = False): fields = [pa.field(f"f{i}", ty) for i, ty in enumerate(pa_types)] diff --git a/py/server/tests/test_exec_ctx.py b/py/server/tests/test_exec_ctx.py index 778c117bc86..d955236f966 100644 --- a/py/server/tests/test_exec_ctx.py +++ b/py/server/tests/test_exec_ctx.py @@ -32,8 +32,8 @@ def assert_threads_ok(thread_func): main_exec_ctx = get_exec_ctx() def thread_update1(p: int, thread_results): - t = empty_table(1) with main_exec_ctx: + t = empty_table(1) t2 = t.update("X = p*p") thread_results[p] = t2.to_string() @@ -80,8 +80,8 @@ def assert_executor_ok(thread_func): main_exec_ctx = get_exec_ctx() def thread_update1(p: int): - t = empty_table(1) with main_exec_ctx: + t = empty_table(1) t2 = t.update("X = p*p") return t2.to_string() diff --git 
a/py/server/tests/test_experiments.py b/py/server/tests/test_experiments.py index a37cb31633a..e86dbd95043 100644 --- a/py/server/tests/test_experiments.py +++ b/py/server/tests/test_experiments.py @@ -5,8 +5,9 @@ import unittest from deephaven import time_table, empty_table, DHError -from deephaven.ugp import exclusive_lock +from deephaven.update_graph import exclusive_lock from deephaven.experimental import time_window +from deephaven.execution_context import get_exec_ctx from tests.testbase import BaseTestCase from deephaven import read_csv from deephaven.experimental.outer_joins import full_outer_join, left_outer_join @@ -16,6 +17,7 @@ class ExperimentalTestCase(BaseTestCase): def setUp(self): super().setUp() self.test_table = read_csv("tests/data/test_table.csv") + self.test_update_graph = get_exec_ctx().update_graph def tearDown(self) -> None: self.test_table = None @@ -64,7 +66,7 @@ def test_left_outer_join(self): self.assertRegex(str(cm.exception), r"Conflicting column names") def test_time_window(self): - with exclusive_lock(): + with exclusive_lock(self.test_update_graph): source_table = time_table("PT00:00:00.01").update(["TS=now()"]) t = time_window(source_table, ts_col="TS", window=10 ** 8, bool_col="InWindow") diff --git a/py/server/tests/test_kafka_consumer.py b/py/server/tests/test_kafka_consumer.py index acfc5f1897c..0ea15dfe4e0 100644 --- a/py/server/tests/test_kafka_consumer.py +++ b/py/server/tests/test_kafka_consumer.py @@ -5,7 +5,7 @@ import os import unittest -from deephaven import kafka_consumer as ck, ugp +from deephaven import kafka_consumer as ck from deephaven.stream.kafka.consumer import TableType, KeyValueSpec from tests.testbase import BaseTestCase from deephaven import dtypes diff --git a/py/server/tests/test_liveness.py b/py/server/tests/test_liveness.py index d0e8fa955d1..bed4e9aedec 100644 --- a/py/server/tests/test_liveness.py +++ b/py/server/tests/test_liveness.py @@ -8,23 +8,27 @@ from deephaven import time_table, DHError -from 
deephaven.ugp import exclusive_lock +from deephaven.execution_context import get_exec_ctx from deephaven.liveness_scope import liveness_scope +from deephaven.update_graph import exclusive_lock from tests.testbase import BaseTestCase +class LivenessTestCase(BaseTestCase): -def create_table(): - with exclusive_lock(): - return time_table("PT00:00:00.001").update(["X=i%11"]).sort("X").tail(16) + def setUp(self) -> None: + super().setUp() + self.test_update_graph = get_exec_ctx().update_graph + def create_table(self): + with exclusive_lock(self.test_update_graph): + return time_table("PT00:00:00.001").update(["X=i%11"]).sort("X").tail(16) -class LivenessTestCase(BaseTestCase): def test_liveness(self): - not_managed = create_table() + not_managed = self.create_table() with liveness_scope() as l_scope: - to_discard = create_table() + to_discard = self.create_table() df = to_pandas(to_discard) - must_keep = create_table() + must_keep = self.create_table() df = to_pandas(must_keep) l_scope.preserve(must_keep) @@ -33,17 +37,17 @@ def test_liveness(self): self.assertFalse(to_discard.j_table.tryRetainReference()) with liveness_scope(): - to_discard = create_table() + to_discard = self.create_table() df = to_pandas(to_discard) - must_keep = create_table() + must_keep = self.create_table() df = to_pandas(must_keep) with self.assertRaises(DHError): l_scope = liveness_scope() - to_discard = create_table() + to_discard = self.create_table() df = to_pandas(to_discard) l_scope_2 = liveness_scope() - must_keep = create_table() + must_keep = self.create_table() df = to_pandas(must_keep) l_scope.preserve(must_keep) l_scope.close() @@ -51,16 +55,16 @@ def test_liveness(self): def test_liveness_nested(self): with liveness_scope() as l_scope: - to_discard = create_table() + to_discard = self.create_table() df = to_pandas(to_discard) - must_keep = create_table() + must_keep = self.create_table() df = to_pandas(must_keep) l_scope.preserve(must_keep) with liveness_scope() as nested_l_scope: 
- nested_to_discard = create_table() + nested_to_discard = self.create_table() df = to_pandas(nested_to_discard) - nested_must_keep = create_table() + nested_must_keep = self.create_table() df = to_pandas(nested_must_keep) nested_l_scope.preserve(nested_must_keep) self.assertTrue(nested_must_keep.j_table.tryRetainReference()) diff --git a/py/server/tests/test_partitioned_table.py b/py/server/tests/test_partitioned_table.py index 327ac621b6a..16e03a0abf9 100644 --- a/py/server/tests/test_partitioned_table.py +++ b/py/server/tests/test_partitioned_table.py @@ -10,8 +10,9 @@ from deephaven.filters import Filter -from deephaven import read_csv, DHError, new_table, ugp, time_table +from deephaven import read_csv, DHError, new_table, update_graph, time_table from tests.testbase import BaseTestCase +from deephaven.execution_context import get_exec_ctx def transform_func(t: Table) -> Table: @@ -38,6 +39,7 @@ def setUp(self): super().setUp() self.test_table = read_csv("tests/data/test_table.csv").tail(num_rows=100) self.partitioned_table = self.test_table.partition_by(by=["c", "e"]) + self.test_update_graph = get_exec_ctx().update_graph def tearDown(self): self.partitioned_table = None @@ -140,7 +142,7 @@ def test_partitioned_transform(self): self.assertIn("f", [col.name for col in pt.constituent_table_columns]) def test_partition_agg(self): - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) self.wait_ticking_table_update(test_table, row_count=1, timeout=5) agg = partition("aggPartition", include_by_columns=True) @@ -157,7 +159,7 @@ def test_partition_agg(self): self.assertEqual(3, len(pt.constituent_table_columns)) def test_from_partitioned_table(self): - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) pt = test_table.partition_by("Y") @@ -189,7 +191,7 @@ 
def test_from_partitioned_table(self): self.assertIn("no column named", str(cm.exception)) def test_from_constituent_tables(self): - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) test_table1 = time_table("PT00:00:01").update(["X=i", "Y=i%23", "Z=X*Y"]) test_table2 = time_table("PT00:00:00.001").update(["X=i", "Y=i%23", "Z=`foo`"]) @@ -211,7 +213,7 @@ def test_keys(self): select_distinct_table = self.test_table.select_distinct(["c", "e"]) self.assertEqual(keys_table.size, select_distinct_table.size) - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) pt = test_table.partition_by("Y") self.wait_ticking_table_update(test_table, row_count=20, timeout=5) diff --git a/py/server/tests/test_pt_proxy.py b/py/server/tests/test_pt_proxy.py index 5a42c4bc2e3..fcb802f1a76 100644 --- a/py/server/tests/test_pt_proxy.py +++ b/py/server/tests/test_pt_proxy.py @@ -4,11 +4,12 @@ import unittest -from deephaven import read_csv, empty_table, SortDirection, DHError, time_table, ugp +from deephaven import read_csv, empty_table, SortDirection, DHError, time_table, update_graph from deephaven.agg import sum_, avg, pct, weighted_avg, formula, group, first, last, max_, median, min_, std, abs_sum, \ var from deephaven.table import PartitionedTableProxy from tests.testbase import BaseTestCase +from deephaven.execution_context import get_exec_ctx class PartitionedTableProxyTestCase(BaseTestCase): @@ -17,6 +18,7 @@ def setUp(self): self.test_table = read_csv("tests/data/test_table.csv").tail(num_rows=100) self.partitioned_table = self.test_table.partition_by(by=["c"]) self.pt_proxy = self.partitioned_table.proxy() + self.test_update_graph = get_exec_ctx().update_graph def tearDown(self): self.partitioned_table = None @@ -27,7 +29,7 @@ def test_target(self): 
self.assertEqual(self.partitioned_table, self.pt_proxy.target) def test_is_refreshing(self): - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) pt = test_table.partition_by("Y") diff --git a/py/server/tests/test_sql.py b/py/server/tests/test_sql.py index 8f0416e64d6..8db0cf0e11a 100644 --- a/py/server/tests/test_sql.py +++ b/py/server/tests/test_sql.py @@ -4,13 +4,16 @@ import jpy -from deephaven import DHError, read_csv, empty_table +from deephaven import read_csv, empty_table +from deephaven.execution_context import ExecutionContext from deephaven.experimental import sql from tests.testbase import BaseTestCase +from test_helper import py_dh_session _JTableSpec = jpy.get_type("io.deephaven.qst.table.TableSpec") -some_global_table = empty_table(42) +with ExecutionContext(j_exec_ctx=py_dh_session.getExecutionContext()): + some_global_table = empty_table(42) class SqlTest(BaseTestCase): diff --git a/py/server/tests/test_table.py b/py/server/tests/test_table.py index f29091bd55d..63c1de2e7de 100644 --- a/py/server/tests/test_table.py +++ b/py/server/tests/test_table.py @@ -5,11 +5,11 @@ from types import SimpleNamespace from typing import List, Any -from deephaven import DHError, read_csv, empty_table, SortDirection, time_table, ugp, new_table, dtypes +from deephaven import DHError, read_csv, empty_table, SortDirection, time_table, update_graph, new_table, dtypes from deephaven.agg import sum_, weighted_avg, avg, pct, group, count_, first, last, max_, median, min_, std, abs_sum, \ var, formula, partition, unique, count_distinct, distinct from deephaven.column import datetime_col -from deephaven.execution_context import make_user_exec_ctx +from deephaven.execution_context import make_user_exec_ctx, get_exec_ctx from deephaven.html import to_html from deephaven.jcompat import j_hashmap from deephaven.pandas import to_pandas @@ -60,6 +60,7 @@ def setUp(self): 
weighted_avg("var", ["weights"]), ] self.aggs = self.aggs_for_rollup + self.aggs_not_for_rollup + self.test_update_graph = get_exec_ctx().update_graph def tearDown(self) -> None: self.test_table = None @@ -745,7 +746,7 @@ def inner_func(p) -> str: t = empty_table(1).update("X = p * 10") return t.to_string().split()[2] - with make_user_exec_ctx(), ugp.shared_lock(): + with make_user_exec_ctx(), update_graph.shared_lock(self.test_update_graph): t = time_table("PT00:00:01").update("X = i").update("TableString = inner_func(X + 10)") self.wait_ticking_table_update(t, row_count=5, timeout=10) @@ -793,9 +794,9 @@ def test_decorated_methods(self): self.verify_table_data(rt, [101, 202]) def test_ticking_table_scope(self): - from deephaven import ugp + from deephaven import update_graph x = 1 - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): rt = time_table("PT00:00:01").update("X = x") self.wait_ticking_table_update(rt, row_count=1, timeout=5) self.verify_table_data(rt, [1]) @@ -806,12 +807,12 @@ def test_ticking_table_scope(self): x = SimpleNamespace() x.v = 1 - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): rt = time_table("PT00:00:01").update("X = x.v").drop_columns("Timestamp") self.wait_ticking_table_update(rt, row_count=1, timeout=5) for i in range(2, 5): - with ugp.exclusive_lock(): + with update_graph.exclusive_lock(self.test_update_graph): x.v = i self.wait_ticking_table_update(rt, row_count=rt.size + 1, timeout=5) self.verify_table_data(rt, list(range(1, 5))) @@ -831,13 +832,13 @@ def test_python_field_access(self): self.assertIn("2000", html_output) def test_slice(self): - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): t = time_table("PT00:00:00.01") rt = t.slice(0, 3) self.assert_table_equals(t.head(3), rt) self.wait_ticking_table_update(t, row_count=5, timeout=5) - with ugp.shared_lock(): + with update_graph.shared_lock(self.test_update_graph): rt = 
t.slice(t.size, -2) self.assertEqual(0, rt.size) self.wait_ticking_table_update(rt, row_count=1, timeout=5) diff --git a/py/server/tests/test_table_listener.py b/py/server/tests/test_table_listener.py index b74c73f4716..2be8169ece2 100644 --- a/py/server/tests/test_table_listener.py +++ b/py/server/tests/test_table_listener.py @@ -13,7 +13,8 @@ from deephaven.jcompat import to_sequence from deephaven.table import Table from deephaven.table_listener import listen, TableListener, TableListenerHandle -from deephaven.ugp import exclusive_lock +from deephaven.execution_context import get_exec_ctx +from deephaven.update_graph import exclusive_lock from tests.testbase import BaseTestCase @@ -59,7 +60,7 @@ class TableListenerTestCase(BaseTestCase): def setUp(self) -> None: super().setUp() - with exclusive_lock(): + with exclusive_lock(get_exec_ctx().update_graph): self.test_table = time_table("PT00:00:00.001").update(["X=i%11"]).sort("X").tail(16) source_table = time_table("PT00:00:00.001").update(["TS=now()"]) self.test_table2 = time_window(source_table, ts_col="TS", window=10 ** 7, bool_col="InWindow") diff --git a/py/server/tests/test_ugp.py b/py/server/tests/test_update_graph.py similarity index 82% rename from py/server/tests/test_ugp.py rename to py/server/tests/test_update_graph.py index b5c68f7ba78..b0143634d24 100644 --- a/py/server/tests/test_ugp.py +++ b/py/server/tests/test_update_graph.py @@ -5,9 +5,9 @@ import jpy import unittest -from deephaven import time_table, DHError, merge, merge_sorted -from deephaven import ugp -from deephaven.execution_context import make_user_exec_ctx +from deephaven import time_table, DHError, merge +from deephaven import update_graph as ug +from deephaven.execution_context import make_user_exec_ctx, get_exec_ctx from deephaven.table import Table from tests.testbase import BaseTestCase @@ -20,39 +20,40 @@ def partitioned_transform_func(t: Table, ot: Table) -> Table: return t.natural_join(ot, on=["X", "Z"], joins=["f"]) -class 
UgpTestCase(BaseTestCase): +class UpdateGraphTestCase(BaseTestCase): def setUp(self) -> None: super().setUp() - ugp.auto_locking = False + ug.auto_locking = False + self.test_update_graph = get_exec_ctx().update_graph def tearDown(self): - ugp.auto_locking = True + ug.auto_locking = True super().tearDown() - def test_ugp_context_manager(self): + def test_ug_context_manager(self): with self.assertRaises(DHError) as cm: test_table = time_table("PT00:00:00.001").update(["X=i%11"]).sort("X").tail(16) self.assertRegex(str(cm.exception), r"IllegalStateException") - with ugp.exclusive_lock(): + with ug.exclusive_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["TS=now()"]) - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["TS=now()"]) # nested locking - with ugp.exclusive_lock(): - with ugp.shared_lock(): + with ug.exclusive_lock(self.test_update_graph): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["TS=now()"]) test_table = time_table("PT00:00:00.001").update(["TS=now()"]) with self.assertRaises(DHError) as cm: - with ugp.shared_lock(): - with ugp.exclusive_lock(): + with ug.shared_lock(self.test_update_graph): + with ug.exclusive_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["TS=now()"]) self.assertRegex(str(cm.exception), "Cannot upgrade a shared lock to an exclusive lock") - def test_ugp_decorator_exclusive(self): + def test_ug_decorator_exclusive(self): def ticking_table_op(tail_size: int, period: str = "PT00:00:01"): return time_table(period).update(["X=i%11"]).sort("X").tail(tail_size) @@ -60,7 +61,7 @@ def ticking_table_op(tail_size: int, period: str = "PT00:00:01"): t = ticking_table_op(16) self.assertRegex(str(cm.exception), r"IllegalStateException") - @ugp.exclusive_locked + @ug.exclusive_locked(self.test_update_graph) def ticking_table_op_decorated(tail_size: int, period: 
str = "PT00:00:01"): t = time_table(period).update(["X=i%11"]).sort("X").tail(tail_size) self.assertEqual(t.size, 0) @@ -75,8 +76,8 @@ def ticking_table_op_decorated(tail_size: int, period: str = "PT00:00:01"): t = ticking_table_op_decorated(10, "PT00:00:00.001") self.wait_ticking_table_update(t, row_count=8, timeout=5) - def test_ugp_decorator_shared(self): - @ugp.shared_locked + def test_ug_decorator_shared(self): + @ug.shared_locked(self.test_update_graph) def ticking_table_op_decorated(tail_size: int, period: str = "PT00:00:01"): t = time_table(period).update(["X=i%11"]).sort("X").tail(tail_size) self.assertEqual(t.size, 0) @@ -92,10 +93,10 @@ def ticking_table_op_decorated(tail_size: int, period: str = "PT00:00:01"): self.wait_ticking_table_update(t, row_count=8, timeout=5) def test_auto_locking_release(self): - ugp.auto_locking = True + ug.auto_locking = True test_table = time_table("PT00:00:00.001").update(["X=i%11"]).sort("X").tail(16) - self.assertFalse(ugp.has_shared_lock()) - self.assertFalse(ugp.has_exclusive_lock()) + self.assertFalse(ug.has_shared_lock(self.test_update_graph)) + self.assertFalse(ug.has_exclusive_lock(self.test_update_graph)) def test_auto_locking_update_select(self): test_table = time_table("PT00:00:00.001") @@ -113,13 +114,13 @@ def test_auto_locking_update_select(self): self.assertRegex(str(cm.exception), r"IllegalStateException") # auto_locking on - ugp.auto_locking = True + ug.auto_locking = True for op in ops: with self.subTest(op=op): result_table = op(test_table, "X = i % 11") def test_auto_locking_wherein(self): - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) unique_table = test_table.head(num_rows=50).select_distinct(formulas=["X", "Y"]) @@ -131,12 +132,12 @@ def test_auto_locking_wherein(self): result_table = test_table.where_not_in(unique_table, cols=["Y"]) self.assertRegex(str(cm.exception), r"IllegalStateException") - 
ugp.auto_locking = True + ug.auto_locking = True result_table = test_table.where_in(unique_table, cols=["Y"]) result_table = test_table.where_not_in(unique_table, cols=["Y"]) def test_auto_locking_joins(self): - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) left_table = test_table.drop_columns(["Z", "Timestamp"]) @@ -156,13 +157,13 @@ def test_auto_locking_joins(self): result_table = left_table.aj(right_table, on="X") self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True for op in ops: with self.subTest(op=op): result_table = left_table.aj(right_table, on="X") def test_auto_locking_rename_columns(self): - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) cols_to_rename = [ @@ -173,23 +174,23 @@ def test_auto_locking_rename_columns(self): result_table = test_table.rename_columns(cols_to_rename) self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True result_table = test_table.rename_columns(cols_to_rename) def test_auto_locking_ungroup(self): - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13"]) grouped_table = test_table.group_by(by=["Y"]) with self.assertRaises(DHError) as cm: ungrouped_table = grouped_table.ungroup(cols=["X"]) self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True ungrouped_table = grouped_table.ungroup(cols=["X"]) def test_auto_locking_head_tail_by(self): ops = [Table.head_by, Table.tail_by] - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i%11"]).sort("X").tail(16) for op in ops: @@ -199,65 +200,65 @@ def 
test_auto_locking_head_tail_by(self): self.assertLessEqual(result_table.size, test_table.size) self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True for op in ops: with self.subTest(op=op): result_table = op(test_table, num_rows=1, by=["X"]) self.assertLessEqual(result_table.size, test_table.size) def test_auto_locking_partitioned_table(self): - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) pt = test_table.partition_by(by="Y") with self.subTest("Merge"): - ugp.auto_locking = False + ug.auto_locking = False with self.assertRaises(DHError) as cm: t = pt.merge() self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True t = pt.merge() with self.subTest("Transform"): - ugp.auto_locking = False + ug.auto_locking = False with make_user_exec_ctx(), self.assertRaises(DHError) as cm: pt1 = pt.transform(transform_func) self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True with make_user_exec_ctx(): pt1 = pt.transform(transform_func) with self.subTest("Partitioned Transform"): - ugp.auto_locking = False + ug.auto_locking = False with make_user_exec_ctx(), self.assertRaises(DHError) as cm: pt2 = pt.partitioned_transform(pt1, partitioned_transform_func) self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True with make_user_exec_ctx(): pt2 = pt.partitioned_transform(pt1, partitioned_transform_func) def test_auto_locking_table_factory(self): - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:00.001").update(["X=i", "Y=i%13", "Z=X*Y"]) test_table1 = time_table("PT00:00:00.001").update(["X=i", "Y=i%23", "Z=X*Y"]) with self.subTest("Merge"): - ugp.auto_locking = False + ug.auto_locking = False with 
self.assertRaises(DHError) as cm: t = merge([test_table, test_table1]) self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True t = merge([test_table, test_table1]) with self.subTest("Merge Sorted"): self.skipTest("mergeSorted does not yet support refreshing tables") def test_auto_locking_partitioned_table_proxy(self): - with ugp.shared_lock(): + with ug.shared_lock(self.test_update_graph): test_table = time_table("PT00:00:01").update(["X=i", "Y=i%13", "Z=X*Y"]) proxy = test_table.drop_columns(["Timestamp", "Y"]).partition_by(by="X").proxy() proxy2 = test_table.drop_columns(["Timestamp", "Z"]).partition_by(by="X").proxy() @@ -266,7 +267,7 @@ def test_auto_locking_partitioned_table_proxy(self): joined_pt_proxy = proxy.natural_join(proxy2, on="X") self.assertRegex(str(cm.exception), r"IllegalStateException") - ugp.auto_locking = True + ug.auto_locking = True joined_pt_proxy = proxy.natural_join(proxy2, on="X") del joined_pt_proxy diff --git a/py/server/tests/test_updateby.py b/py/server/tests/test_updateby.py index 8d16da7a2be..82b1ae0b223 100644 --- a/py/server/tests/test_updateby.py +++ b/py/server/tests/test_updateby.py @@ -4,7 +4,7 @@ import unittest -from deephaven import read_csv, time_table, ugp +from deephaven import read_csv, time_table, update_graph from deephaven.updateby import BadDataBehavior, MathContext, OperationControl, DeltaControl, ema_tick, ema_time, \ ems_tick, ems_time, emmin_tick, emmin_time, emmax_tick, emmax_time, emstd_tick, emstd_time,\ cum_sum, cum_prod, cum_min, cum_max, forward_fill, delta, rolling_sum_tick, rolling_sum_time, \ @@ -12,13 +12,15 @@ rolling_max_tick, rolling_max_time, rolling_prod_tick, rolling_prod_time, rolling_count_tick, rolling_count_time, \ rolling_std_tick, rolling_std_time, rolling_wavg_tick, rolling_wavg_time from tests.testbase import BaseTestCase +from deephaven.execution_context import get_exec_ctx class UpdateByTestCase(BaseTestCase): def setUp(self): 
super().setUp() self.static_table = read_csv("tests/data/test_table.csv").update("Timestamp=now()") - with ugp.exclusive_lock(): + self.test_update_graph = get_exec_ctx().update_graph + with update_graph.exclusive_lock(self.test_update_graph): self.ticking_table = time_table("PT00:00:00.001").update( ["a = i", "b = i*i % 13", "c = i * 13 % 23", "d = a + b", "e = a - b"]) @@ -163,7 +165,7 @@ def test_em(self): rt = t.update_by(ops=op, by="b") self.assertTrue(rt.is_refreshing is t.is_refreshing) self.assertEqual(len(rt.columns), 1 + len(t.columns)) - with ugp.exclusive_lock(): + with update_graph.exclusive_lock(self.test_update_graph): self.assertEqual(rt.size, t.size) def test_em_proxy(self): @@ -178,7 +180,7 @@ def test_em_proxy(self): for ct, rct in zip(pt_proxy.target.constituent_tables, rt_proxy.target.constituent_tables): self.assertTrue(rct.is_refreshing is ct.is_refreshing) self.assertEqual(len(rct.columns), 1 + len(ct.columns)) - with ugp.exclusive_lock(): + with update_graph.exclusive_lock(self.test_update_graph): self.assertEqual(ct.size, rct.size) def test_simple_ops(self): @@ -190,7 +192,7 @@ def test_simple_ops(self): rt = t.update_by(ops=op(pairs), by="e") self.assertTrue(rt.is_refreshing is t.is_refreshing) self.assertEqual(len(rt.columns), 2 + len(t.columns)) - with ugp.exclusive_lock(): + with update_graph.exclusive_lock(self.test_update_graph): self.assertEqual(rt.size, t.size) def test_simple_ops_proxy(self): @@ -209,7 +211,7 @@ def test_simple_ops_proxy(self): 2 + len(pt_proxy.target.constituent_table_columns)) for ct, rct in zip(pt_proxy.target.constituent_tables, rt_proxy.target.constituent_tables): - with ugp.exclusive_lock(): + with update_graph.exclusive_lock(self.test_update_graph): self.assertEqual(ct.size, rct.size) def test_rolling_ops(self): @@ -219,7 +221,7 @@ def test_rolling_ops(self): rt = t.update_by(ops=op, by="c") self.assertTrue(rt.is_refreshing is t.is_refreshing) self.assertEqual(len(rt.columns), 2 + len(t.columns)) - with 
ugp.exclusive_lock(): + with update_graph.exclusive_lock(self.test_update_graph): self.assertEqual(rt.size, t.size) def test_rolling_ops_proxy(self): @@ -234,7 +236,7 @@ def test_rolling_ops_proxy(self): for ct, rct in zip(pt_proxy.target.constituent_tables, rt_proxy.target.constituent_tables): self.assertTrue(rct.is_refreshing is ct.is_refreshing) self.assertEqual(len(rct.columns), 2 + len(ct.columns)) - with ugp.exclusive_lock(): + with update_graph.exclusive_lock(self.test_update_graph): self.assertEqual(ct.size, rct.size) if __name__ == '__main__': diff --git a/py/server/tests/testbase.py b/py/server/tests/testbase.py index a4aa74be765..4b80c97475c 100644 --- a/py/server/tests/testbase.py +++ b/py/server/tests/testbase.py @@ -3,19 +3,17 @@ # import time -import contextlib import unittest import jpy from deephaven import DHError -from deephaven.ugp import exclusive_lock +from deephaven.update_graph import exclusive_lock from deephaven.table import Table, PartitionedTableProxy from test_helper import py_dh_session _JTableTools = jpy.get_type("io.deephaven.engine.util.TableTools") -_JExecutionContext = jpy.get_type("io.deephaven.engine.context.ExecutionContext") def table_equals(table_a: Table, table_b: Table) -> bool: @@ -48,7 +46,7 @@ def wait_ticking_table_update(self, table: Table, row_count: int, timeout: int): row_count (int): the target row count of the table timeout (int): the number of seconds to wait """ - with exclusive_lock(): + with exclusive_lock(table): timeout *= 10 ** 9 while table.size < row_count and timeout > 0: s_time = time.time_ns() diff --git a/python-engine-test/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilter.java b/python-engine-test/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilter.java index 89cb4e93029..a9b683a8630 100644 --- a/python-engine-test/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilter.java +++ 
b/python-engine-test/src/test/java/io/deephaven/engine/table/impl/select/TestConditionFilter.java @@ -374,8 +374,9 @@ private void check(String expression, Predicate> testPredica QueryScope currentScope = currentContext.getQueryScope(); try { if (pythonScope == null) { - final ExecutionContext context = new PythonDeephavenSession(new PythonScopeJpyImpl( - getMainGlobals().asDict())).getExecutionContext(); + final ExecutionContext context = new PythonDeephavenSession( + ExecutionContext.getDefaultContext().getUpdateGraph(), + new PythonScopeJpyImpl(getMainGlobals().asDict())).getExecutionContext(); pythonScope = context.getQueryScope(); context.open(); } diff --git a/python-engine-test/src/test/java/io/deephaven/engine/util/TestWorkerPythonEnvironment.java b/python-engine-test/src/test/java/io/deephaven/engine/util/TestWorkerPythonEnvironment.java index d25761fe2b8..7aa7ad0c274 100644 --- a/python-engine-test/src/test/java/io/deephaven/engine/util/TestWorkerPythonEnvironment.java +++ b/python-engine-test/src/test/java/io/deephaven/engine/util/TestWorkerPythonEnvironment.java @@ -3,9 +3,8 @@ */ package io.deephaven.engine.util; -import io.deephaven.base.testing.BaseArrayTestCase; import io.deephaven.configuration.Configuration; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; import io.deephaven.io.log.LogLevel; import io.deephaven.io.logger.StreamLoggerImpl; import io.deephaven.util.process.ProcessEnvironment; @@ -23,9 +22,8 @@ * Test various Jpy related overloading methods. 
*/ @Ignore // TODO (deephaven-core#734) -public class TestWorkerPythonEnvironment extends BaseArrayTestCase { +public class TestWorkerPythonEnvironment extends RefreshingTableTestCase { - @SuppressWarnings("MethodDoesntCallSuperMethod") @Override public void setUp() throws Exception { super.setUp(); @@ -34,14 +32,6 @@ public void setUp() throws Exception { TestWorkerPythonEnvironment.class.getCanonicalName(), new StreamLoggerImpl(System.out, LogLevel.INFO)); } - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); } public void testNumpyImport() { diff --git a/server/build.gradle b/server/build.gradle index 6e77d40ad7d..2d6d596c111 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -95,6 +95,7 @@ dependencies { testImplementation project(':proto:proto-backplane-grpc') testImplementation project(':base-test-utils') + testImplementation project(':engine-context') testImplementation project(':engine-test-utils') testImplementation TestTools.projectDependency(project, 'engine-context') testImplementation TestTools.projectDependency(project, 'Util') diff --git a/server/jetty/build.gradle b/server/jetty/build.gradle index d9d6f7cb9fc..e553d507e74 100644 --- a/server/jetty/build.gradle +++ b/server/jetty/build.gradle @@ -47,6 +47,6 @@ dependencies { Classpaths.inheritSlf4j(project, 'slf4j-simple', 'testRuntimeOnly') } -test.systemProperty "UpdateGraphProcessor.allowUnitTestMode", false +test.systemProperty "PeriodicUpdateGraph.allowUnitTestMode", false apply plugin: 'io.deephaven.java-open-nio' diff --git a/server/netty/build.gradle b/server/netty/build.gradle index 6e4f1a5c46b..5debd148aff 100644 --- a/server/netty/build.gradle +++ b/server/netty/build.gradle @@ -30,6 +30,6 @@ dependencies { Classpaths.inheritSlf4j(project, 'slf4j-simple', 'testRuntimeOnly') } 
-test.systemProperty "UpdateGraphProcessor.allowUnitTestMode", false +test.systemProperty "PeriodicUpdateGraph.allowUnitTestMode", false apply plugin: 'io.deephaven.java-open-nio' diff --git a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java index 24f2ca38d63..1ff0d88d6fa 100644 --- a/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java +++ b/server/src/main/java/io/deephaven/server/arrow/ArrowFlightUtil.java @@ -13,12 +13,14 @@ import io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest; import io.deephaven.base.verify.Assert; import io.deephaven.configuration.Configuration; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.SingletonLivenessManager; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.BaseTable; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.util.BarrageMessage; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.extensions.barrage.BarragePerformanceLog; import io.deephaven.extensions.barrage.BarrageSnapshotOptions; import io.deephaven.extensions.barrage.BarrageStreamGenerator; @@ -39,6 +41,7 @@ import io.deephaven.server.hierarchicaltable.HierarchicalTableViewSubscription; import io.deephaven.server.session.SessionState; import io.deephaven.server.session.TicketRouter; +import io.deephaven.util.SafeCloseable; import io.grpc.stub.ServerCallStreamObserver; import io.grpc.stub.StreamObserver; import org.apache.arrow.flatbuf.MessageHeader; @@ -68,7 +71,7 @@ public static void DoGetCustom( final Flight.Ticket request, final StreamObserver observer) { - final SessionState.ExportObject export = + final SessionState.ExportObject> export = ticketRouter.resolve(session, request, "request"); final BarragePerformanceLog.SnapshotMetricsHelper metrics = @@ -590,24 +593,32 @@ private 
synchronized void onExportResolved(final SessionState.ExportObject * When a client subscribes initially, a snapshot of the table is sent. The snapshot is obtained using either get() or * getPrev() based on the state of the LogicalClock. On each subsequent update, the client is given the deltas between * the last update propagation and the next. - * + *

* When a client changes its subscription it will be sent a snapshot of only the data that the server believes it needs * assuming that the client has been respecting the existing subscription. Practically, this means that the server may * omit some data if the client's viewport change overlaps the currently recognized viewport. - * + *

* It is possible to use this replication source to create subscriptions that propagate changes from one UGP to another * inside the same JVM. - * + *

* The client-side counterpart of this is the {@link StreamReader}. * * @param The sub-view type that the listener expects to receive. @@ -114,12 +113,12 @@ public static class Operation @AssistedFactory public interface Factory { - Operation create(BaseTable parent, long updateIntervalMs); + Operation create(BaseTable parent, long updateIntervalMs); } private final Scheduler scheduler; private final BarrageStreamGenerator.Factory streamGeneratorFactory; - private final BaseTable parent; + private final BaseTable parent; private final long updateIntervalMs; private final Runnable onGetSnapshot; @@ -127,7 +126,7 @@ public interface Factory { public Operation( final Scheduler scheduler, final BarrageStreamGenerator.Factory streamGeneratorFactory, - @Assisted final BaseTable parent, + @Assisted final BaseTable parent, @Assisted final long updateIntervalMs) { this(scheduler, streamGeneratorFactory, parent, updateIntervalMs, null); } @@ -136,7 +135,7 @@ public Operation( public Operation( final Scheduler scheduler, final BarrageStreamGenerator.Factory streamGeneratorFactory, - final BaseTable parent, + final BaseTable parent, final long updateIntervalMs, @Nullable final Runnable onGetSnapshot) { this.scheduler = scheduler; @@ -303,7 +302,7 @@ public void close() { public BarrageMessageProducer(final Scheduler scheduler, final BarrageStreamGenerator.Factory streamGeneratorFactory, - final BaseTable parent, + final BaseTable parent, final long updateIntervalMs, final Runnable onGetSnapshot) { this.logPrefix = "BarrageMessageProducer(" + Integer.toHexString(System.identityHashCode(this)) + "): "; @@ -596,9 +595,10 @@ private class DeltaListener extends InstrumentedTableUpdateListener { @Override public void onUpdate(final TableUpdate upstream) { synchronized (BarrageMessageProducer.this) { - if (lastIndexClockStep >= LogicalClock.DEFAULT.currentStep()) { + if (lastIndexClockStep >= parent.getUpdateGraph().clock().currentStep()) { throw new IllegalStateException(logPrefix + 
"lastIndexClockStep=" + lastIndexClockStep - + " >= notification on " + LogicalClock.DEFAULT.currentStep()); + + " >= notification on " + + parent.getUpdateGraph().clock().currentStep()); } final boolean shouldEnqueueDelta = !activeSubscriptions.isEmpty(); @@ -611,7 +611,7 @@ public void onUpdate(final TableUpdate upstream) { parentTableSize = parent.size(); // mark when the last indices are from, so that terminal notifications can make use of them if required - lastIndexClockStep = LogicalClock.DEFAULT.currentStep(); + lastIndexClockStep = parent.getUpdateGraph().clock().currentStep(); if (log.isDebugEnabled()) { try (final RowSet prevRowSet = parent.getRowSet().copyPrev()) { log.debug().append(logPrefix) @@ -841,9 +841,11 @@ private void enqueueUpdate(final TableUpdate upstream) { } if (log.isDebugEnabled()) { - log.debug().append(logPrefix).append("step=").append(LogicalClock.DEFAULT.currentStep()) - .append(", upstream=").append(upstream).append(", activeSubscriptions=") - .append(activeSubscriptions.size()) + log.debug().append(logPrefix) + .append("updateGraph=").append(parent.getUpdateGraph()) + .append(", step=").append(parent.getUpdateGraph().clock().currentStep()) + .append(", upstream=").append(upstream) + .append(", activeSubscriptions=").append(activeSubscriptions.size()) .append(", numFullSubscriptions=").append(numFullSubscriptions) .append(", addsToRecord=").append(addsToRecord) .append(", modsToRecord=").append(modsToRecord) @@ -920,11 +922,12 @@ private void enqueueUpdate(final TableUpdate upstream) { if (log.isDebugEnabled()) { log.debug().append(logPrefix).append("update accumulation complete for step=") - .append(LogicalClock.DEFAULT.currentStep()).endl(); + .append(parent.getUpdateGraph().clock().currentStep()).endl(); } - pendingDeltas.add(new Delta(LogicalClock.DEFAULT.currentStep(), deltaColumnOffset, upstream, addsToRecord, - modsToRecord, (BitSet) activeColumns.clone(), modifiedColumns)); + pendingDeltas + .add(new 
Delta(parent.getUpdateGraph().clock().currentStep(), deltaColumnOffset, + upstream, addsToRecord, modsToRecord, (BitSet) activeColumns.clone(), modifiedColumns)); } private void schedulePropagation() { @@ -1328,8 +1331,10 @@ private void updateSubscriptionsSnapshotAndPropagate() { if (SUBSCRIPTION_GROWTH_ENABLED && snapshot.rowsIncluded.size() > 0) { // very simplistic logic to take the last snapshot and extrapolate max number of rows that will // not exceed the target UGP processing time percentage + PeriodicUpdateGraph updateGraph = parent.getUpdateGraph().cast(); long targetNanos = (long) (TARGET_SNAPSHOT_PERCENTAGE - * UpdateGraphProcessor.DEFAULT.getTargetCycleDurationMillis() * 1000000); + * updateGraph.getTargetCycleDurationMillis() + * 1000000); long nanosPerCell = elapsed / (snapshot.rowsIncluded.size() * columnCount); @@ -2208,7 +2213,7 @@ BarrageMessage getSnapshot( } @Override - protected void destroy() { + protected synchronized void destroy() { super.destroy(); if (stats != null) { stats.stop(); @@ -2325,6 +2330,10 @@ public void addParentReference(final Object parent) { if (parent instanceof LivenessReferent) { manage((LivenessReferent) parent); } + if (parent instanceof NotificationQueue.Dependency) { + // ensure that we are in the same update graph + this.parent.getUpdateGraph((NotificationQueue.Dependency) parent); + } } } diff --git a/server/src/main/java/io/deephaven/server/console/NoConsoleSessionModule.java b/server/src/main/java/io/deephaven/server/console/NoConsoleSessionModule.java index f2eb1a65213..eef5255c7e4 100644 --- a/server/src/main/java/io/deephaven/server/console/NoConsoleSessionModule.java +++ b/server/src/main/java/io/deephaven/server/console/NoConsoleSessionModule.java @@ -7,10 +7,14 @@ import dagger.Provides; import dagger.multibindings.IntoMap; import dagger.multibindings.StringKey; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import 
io.deephaven.engine.util.NoLanguageDeephavenSession; import io.deephaven.engine.util.ScriptSession; import io.deephaven.server.console.groovy.InitScriptsModule; +import javax.inject.Named; + @Module(includes = InitScriptsModule.ServiceLoader.class) public class NoConsoleSessionModule { @Provides @@ -21,7 +25,8 @@ ScriptSession bindScriptSession(NoLanguageDeephavenSession noLanguageSession) { } @Provides - NoLanguageDeephavenSession bindNoLanguageSession() { - return new NoLanguageDeephavenSession(); + NoLanguageDeephavenSession bindNoLanguageSession( + @Named(PeriodicUpdateGraph.DEFAULT_UPDATE_GRAPH_NAME) final UpdateGraph updateGraph) { + return new NoLanguageDeephavenSession(updateGraph); } } diff --git a/server/src/main/java/io/deephaven/server/console/ScopeTicketResolver.java b/server/src/main/java/io/deephaven/server/console/ScopeTicketResolver.java index 6a1ea86a8d9..b8594dc28f2 100644 --- a/server/src/main/java/io/deephaven/server/console/ScopeTicketResolver.java +++ b/server/src/main/java/io/deephaven/server/console/ScopeTicketResolver.java @@ -10,7 +10,6 @@ import io.deephaven.engine.liveness.LivenessReferent; import io.deephaven.engine.table.Table; import io.deephaven.engine.updategraph.DynamicNode; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.ScriptSession; import io.deephaven.proto.backplane.grpc.Ticket; import io.deephaven.proto.flight.util.TicketRouterHelper; @@ -59,21 +58,22 @@ public SessionState.ExportObject flightInfoFor( // there is no mechanism to wait for a scope variable to resolve; require that the scope variable exists now final String scopeName = nameForDescriptor(descriptor, logId); - final Flight.FlightInfo flightInfo = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> { - final ScriptSession gss = scriptSessionProvider.get(); - Object scopeVar = gss.getVariable(scopeName, null); - if (scopeVar == null) { - throw Exceptions.statusRuntimeException(Code.NOT_FOUND, - "Could not 
resolve '" + logId + ": no variable exists with name '" + scopeName + "'"); - } - if (scopeVar instanceof Table) { - scopeVar = authTransformation.transform(scopeVar); - return TicketRouter.getFlightInfo((Table) scopeVar, descriptor, flightTicketForName(scopeName)); - } + final ScriptSession gss = scriptSessionProvider.get(); + final Flight.FlightInfo flightInfo = + gss.getExecutionContext().getUpdateGraph().sharedLock().computeLocked(() -> { + Object scopeVar = gss.getVariable(scopeName, null); + if (scopeVar == null) { + throw Exceptions.statusRuntimeException(Code.NOT_FOUND, + "Could not resolve '" + logId + ": no variable exists with name '" + scopeName + "'"); + } + if (scopeVar instanceof Table) { + scopeVar = authTransformation.transform(scopeVar); + return TicketRouter.getFlightInfo((Table) scopeVar, descriptor, flightTicketForName(scopeName)); + } - throw Exceptions.statusRuntimeException(Code.NOT_FOUND, - "Could not resolve '" + logId + "': no variable exists with name '" + scopeName + "'"); - }); + throw Exceptions.statusRuntimeException(Code.NOT_FOUND, + "Could not resolve '" + logId + "': no variable exists with name '" + scopeName + "'"); + }); return SessionState.wrapAsExport(flightInfo); } @@ -102,9 +102,9 @@ public SessionState.ExportObject resolve( private SessionState.ExportObject resolve( @Nullable final SessionState session, final String scopeName, final String logId) { + final ScriptSession gss = scriptSessionProvider.get(); // fetch the variable from the scope right now - T export = UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> { - final ScriptSession gss = scriptSessionProvider.get(); + T export = gss.getExecutionContext().getUpdateGraph().sharedLock().computeLocked(() -> { T scopeVar = null; try { // noinspection unchecked diff --git a/server/src/main/java/io/deephaven/server/console/groovy/GroovyConsoleSessionModule.java b/server/src/main/java/io/deephaven/server/console/groovy/GroovyConsoleSessionModule.java index 
c37cef12010..eeeead6567f 100644 --- a/server/src/main/java/io/deephaven/server/console/groovy/GroovyConsoleSessionModule.java +++ b/server/src/main/java/io/deephaven/server/console/groovy/GroovyConsoleSessionModule.java @@ -7,11 +7,14 @@ import dagger.Provides; import dagger.multibindings.IntoMap; import dagger.multibindings.StringKey; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.util.GroovyDeephavenSession; import io.deephaven.engine.util.GroovyDeephavenSession.RunScripts; import io.deephaven.engine.util.ScriptSession; import io.deephaven.plugin.type.ObjectTypeLookup; +import javax.inject.Named; import java.io.IOException; import java.io.UncheckedIOException; @@ -25,10 +28,13 @@ ScriptSession bindScriptSession(final GroovyDeephavenSession groovySession) { } @Provides - GroovyDeephavenSession bindGroovySession(ObjectTypeLookup lookup, final ScriptSession.Listener listener, + GroovyDeephavenSession bindGroovySession( + @Named(PeriodicUpdateGraph.DEFAULT_UPDATE_GRAPH_NAME) final UpdateGraph updateGraph, + final ObjectTypeLookup lookup, + final ScriptSession.Listener listener, final RunScripts runScripts) { try { - return new GroovyDeephavenSession(lookup, listener, runScripts); + return new GroovyDeephavenSession(updateGraph, lookup, listener, runScripts); } catch (final IOException e) { throw new UncheckedIOException(e); } diff --git a/server/src/main/java/io/deephaven/server/console/python/PythonConsoleSessionModule.java b/server/src/main/java/io/deephaven/server/console/python/PythonConsoleSessionModule.java index af3c334f9f3..78999fb4c0c 100644 --- a/server/src/main/java/io/deephaven/server/console/python/PythonConsoleSessionModule.java +++ b/server/src/main/java/io/deephaven/server/console/python/PythonConsoleSessionModule.java @@ -7,11 +7,14 @@ import dagger.Provides; import dagger.multibindings.IntoMap; import dagger.multibindings.StringKey; +import 
io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.util.PythonEvaluatorJpy; import io.deephaven.engine.util.ScriptSession; import io.deephaven.integrations.python.PythonDeephavenSession; import io.deephaven.plugin.type.ObjectTypeLookup; +import javax.inject.Named; import java.io.IOException; import java.io.UncheckedIOException; @@ -25,10 +28,13 @@ ScriptSession bindScriptSession(PythonDeephavenSession pythonSession) { } @Provides - PythonDeephavenSession bindPythonSession(ObjectTypeLookup lookup, final ScriptSession.Listener listener, - PythonEvaluatorJpy pythonEvaluator) { + PythonDeephavenSession bindPythonSession( + @Named(PeriodicUpdateGraph.DEFAULT_UPDATE_GRAPH_NAME) final UpdateGraph updateGraph, + final ObjectTypeLookup lookup, + final ScriptSession.Listener listener, + final PythonEvaluatorJpy pythonEvaluator) { try { - return new PythonDeephavenSession(lookup, listener, true, pythonEvaluator); + return new PythonDeephavenSession(updateGraph, lookup, listener, true, pythonEvaluator); } catch (IOException e) { throw new UncheckedIOException("Unable to run python startup scripts", e); } diff --git a/server/src/main/java/io/deephaven/server/partitionedtable/PartitionedTableServiceGrpcImpl.java b/server/src/main/java/io/deephaven/server/partitionedtable/PartitionedTableServiceGrpcImpl.java index 175c08f3d70..8cc9b6011f0 100644 --- a/server/src/main/java/io/deephaven/server/partitionedtable/PartitionedTableServiceGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/partitionedtable/PartitionedTableServiceGrpcImpl.java @@ -7,7 +7,6 @@ import io.deephaven.auth.codegen.impl.PartitionedTableServiceContextualAuthWiring; import io.deephaven.engine.table.PartitionedTable; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; import 
io.deephaven.proto.backplane.grpc.ExportedTableCreationResponse; @@ -38,7 +37,6 @@ public class PartitionedTableServiceGrpcImpl extends PartitionedTableServiceGrpc private final TicketRouter ticketRouter; private final SessionService sessionService; - private final UpdateGraphProcessor updateGraphProcessor; private final PartitionedTableServiceContextualAuthWiring authWiring; private final TicketResolverBase.AuthTransformation authorizationTransformation; @@ -46,12 +44,10 @@ public class PartitionedTableServiceGrpcImpl extends PartitionedTableServiceGrpc public PartitionedTableServiceGrpcImpl( TicketRouter ticketRouter, SessionService sessionService, - UpdateGraphProcessor updateGraphProcessor, AuthorizationProvider authorizationProvider, PartitionedTableServiceContextualAuthWiring authWiring) { this.ticketRouter = ticketRouter; this.sessionService = sessionService; - this.updateGraphProcessor = updateGraphProcessor; this.authWiring = authWiring; this.authorizationTransformation = authorizationProvider.getTicketTransformation(); } @@ -91,12 +87,12 @@ public void merge( .require(partitionedTable) .onError(responseObserver) .submit(() -> { + final Table table = partitionedTable.get().table(); authWiring.checkPermissionMerge(session.getAuthContext(), request, - Collections.singletonList(partitionedTable.get().table())); + Collections.singletonList(table)); Table merged; - if (partitionedTable.get().table().isRefreshing()) { - merged = updateGraphProcessor.sharedLock() - .computeLocked(partitionedTable.get()::merge); + if (table.isRefreshing()) { + merged = table.getUpdateGraph().sharedLock().computeLocked(partitionedTable.get()::merge); } else { merged = partitionedTable.get().merge(); } @@ -141,7 +137,7 @@ public void getTable( .toArray(); table = partitionedTable.get().constituentFor(values); } else { - table = updateGraphProcessor.sharedLock().computeLocked(() -> { + table = keyTable.getUpdateGraph().sharedLock().computeLocked(() -> { long keyTableSize = 
keyTable.size(); if (keyTableSize != 1) { throw Exceptions.statusRuntimeException(Code.INVALID_ARGUMENT, diff --git a/server/src/main/java/io/deephaven/server/runner/DeephavenApiServer.java b/server/src/main/java/io/deephaven/server/runner/DeephavenApiServer.java index a536a2e4109..2a815abdeb3 100644 --- a/server/src/main/java/io/deephaven/server/runner/DeephavenApiServer.java +++ b/server/src/main/java/io/deephaven/server/runner/DeephavenApiServer.java @@ -9,7 +9,8 @@ import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; import io.deephaven.engine.table.impl.util.EngineMetrics; import io.deephaven.engine.table.impl.util.ServerStateTracker; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.util.AbstractScriptSession; import io.deephaven.engine.util.ScriptSession; import io.deephaven.internal.log.LoggerFactory; @@ -22,11 +23,13 @@ import io.deephaven.uri.resolver.UriResolver; import io.deephaven.uri.resolver.UriResolvers; import io.deephaven.uri.resolver.UriResolversInstance; +import io.deephaven.util.SafeCloseable; import io.deephaven.util.annotations.VisibleForTesting; import io.deephaven.util.process.ProcessEnvironment; import io.deephaven.util.process.ShutdownManager; import javax.inject.Inject; +import javax.inject.Named; import javax.inject.Provider; import java.io.IOException; import java.util.Map; @@ -41,7 +44,7 @@ public class DeephavenApiServer { private static final Logger log = LoggerFactory.getLogger(DeephavenApiServer.class); private final GrpcServer server; - private final UpdateGraphProcessor ugp; + private final UpdateGraph ug; private final LogInit logInit; private final Provider scriptSessionProvider; private final PluginRegistration pluginRegistration; @@ -55,7 +58,7 @@ public class DeephavenApiServer { @Inject public DeephavenApiServer( final GrpcServer server, - final 
UpdateGraphProcessor ugp, + @Named(PeriodicUpdateGraph.DEFAULT_UPDATE_GRAPH_NAME) final UpdateGraph ug, final LogInit logInit, final Provider scriptSessionProvider, final PluginRegistration pluginRegistration, @@ -66,7 +69,7 @@ public DeephavenApiServer( final Provider executionContextProvider, final ServerConfig serverConfig) { this.server = server; - this.ugp = ugp; + this.ug = ug; this.logInit = logInit; this.scriptSessionProvider = scriptSessionProvider; this.pluginRegistration = pluginRegistration; @@ -127,16 +130,20 @@ public DeephavenApiServer run() throws IOException, ClassNotFoundException, Time scriptSessionProvider.get(); pluginRegistration.registerAll(); - log.info().append("Starting UGP...").endl(); - ugp.start(); + log.info().append("Starting UpdateGraph...").endl(); + ug.cast().start(); - EngineMetrics.maybeStartStatsCollection(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(ug).open()) { + EngineMetrics.maybeStartStatsCollection(); + } log.info().append("Starting Performance Trackers...").endl(); QueryPerformanceRecorder.installPoolAllocationRecorder(); QueryPerformanceRecorder.installUpdateGraphLockInstrumentation(); - UpdatePerformanceTracker.start(); - ServerStateTracker.start(); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph(ug).open()) { + UpdatePerformanceTracker.start(); + ServerStateTracker.start(); + } for (UriResolver resolver : uriResolvers.resolvers()) { log.debug().append("Found table resolver ").append(resolver.getClass().toString()).endl(); @@ -175,4 +182,7 @@ void startForUnitTests() throws Exception { server.start(); } + public UpdateGraph getUpdateGraph() { + return ug; + } } diff --git a/server/src/main/java/io/deephaven/server/runner/DeephavenApiServerModule.java b/server/src/main/java/io/deephaven/server/runner/DeephavenApiServerModule.java index 33cb476f051..3b3c82e0e49 100644 --- 
a/server/src/main/java/io/deephaven/server/runner/DeephavenApiServerModule.java +++ b/server/src/main/java/io/deephaven/server/runner/DeephavenApiServerModule.java @@ -9,7 +9,9 @@ import io.deephaven.base.clock.Clock; import io.deephaven.chunk.util.pools.MultiChunkPool; import io.deephaven.configuration.Configuration; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.context.ExecutionContext; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.engine.util.ScriptSession; import io.deephaven.server.appmode.ApplicationsModule; import io.deephaven.server.config.ConfigServiceModule; @@ -96,8 +98,10 @@ public ScriptSession provideScriptSession(Map> s @Provides @Singleton - public static Scheduler provideScheduler(final @Named("scheduler.poolSize") int poolSize) { - final ThreadFactory concurrentThreadFactory = new ThreadFactory("Scheduler-Concurrent"); + public static Scheduler provideScheduler( + final @Named(PeriodicUpdateGraph.DEFAULT_UPDATE_GRAPH_NAME) UpdateGraph updateGraph, + final @Named("scheduler.poolSize") int poolSize) { + final ThreadFactory concurrentThreadFactory = new ThreadFactory("Scheduler-Concurrent", updateGraph); final ScheduledExecutorService concurrentExecutor = new ScheduledThreadPoolExecutor(poolSize, concurrentThreadFactory) { @Override @@ -107,7 +111,7 @@ protected void afterExecute(final Runnable task, final Throwable error) { } }; - final ThreadFactory serialThreadFactory = new ThreadFactory("Scheduler-Serial"); + final ThreadFactory serialThreadFactory = new ThreadFactory("Scheduler-Serial", updateGraph); final ExecutorService serialExecutor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), serialThreadFactory) { @@ -146,19 +150,27 @@ private static void afterExecute(final String executorType, final Runnable task, @Provides @Singleton - public static UpdateGraphProcessor 
provideUpdateGraphProcessor() { - return UpdateGraphProcessor.DEFAULT; + @Named(PeriodicUpdateGraph.DEFAULT_UPDATE_GRAPH_NAME) + public static UpdateGraph provideUpdateGraph() { + return PeriodicUpdateGraph.newBuilder(PeriodicUpdateGraph.DEFAULT_UPDATE_GRAPH_NAME) + .numUpdateThreads(PeriodicUpdateGraph.NUM_THREADS_DEFAULT_UPDATE_GRAPH) + .existingOrBuild(); } private static class ThreadFactory extends NamingThreadFactory { - public ThreadFactory(final String name) { + private final UpdateGraph updateGraph; + + public ThreadFactory(final String name, final UpdateGraph updateGraph) { super(DeephavenApiServer.class, name); + this.updateGraph = updateGraph; } @Override public Thread newThread(final @NotNull Runnable r) { return super.newThread(ThreadInitializationFactory.wrapRunnable(() -> { MultiChunkPool.enableDedicatedPoolForThisThread(); + // noinspection resource + ExecutionContext.getContext().withUpdateGraph(updateGraph).open(); r.run(); })); } diff --git a/server/src/main/java/io/deephaven/server/table/ExportedTableUpdateListener.java b/server/src/main/java/io/deephaven/server/table/ExportedTableUpdateListener.java index 81dbacd5496..abf3956ebd9 100644 --- a/server/src/main/java/io/deephaven/server/table/ExportedTableUpdateListener.java +++ b/server/src/main/java/io/deephaven/server/table/ExportedTableUpdateListener.java @@ -4,6 +4,7 @@ package io.deephaven.server.table; import com.google.rpc.Code; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.TrackingRowSet; import io.deephaven.engine.table.TableUpdate; import io.deephaven.engine.table.impl.BaseTable; @@ -11,6 +12,7 @@ import io.deephaven.engine.table.impl.NotificationStepReceiver; import io.deephaven.engine.table.impl.SwapListener; import io.deephaven.engine.table.impl.UncoalescedTable; +import io.deephaven.engine.updategraph.NotificationQueue; import io.deephaven.hash.KeyedLongObjectHashMap; import io.deephaven.hash.KeyedLongObjectKey; import 
io.deephaven.internal.log.LoggerFactory; @@ -21,6 +23,7 @@ import io.deephaven.proto.util.Exceptions; import io.deephaven.proto.util.ExportTicketHelper; import io.deephaven.server.session.SessionState; +import io.deephaven.util.SafeCloseable; import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; import org.apache.commons.lang3.mutable.MutableLong; @@ -76,7 +79,10 @@ public void onNext(final ExportNotification notification) { try { final Object obj = export.get(); if (obj instanceof BaseTable) { - onNewTableExport(ticket, exportId, (BaseTable) obj); + try (final SafeCloseable ignored = ExecutionContext.getContext().withUpdateGraph( + ((NotificationQueue.Dependency) obj).getUpdateGraph()).open()) { + onNewTableExport(ticket, exportId, (BaseTable) obj); + } } } finally { export.dropReference(); diff --git a/server/src/main/java/io/deephaven/server/table/ops/AjRajGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/AjRajGrpcImpl.java index b013fa3d32e..a457fe4c566 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/AjRajGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/AjRajGrpcImpl.java @@ -7,7 +7,6 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.AjRajTablesRequest; import io.deephaven.proto.backplane.grpc.BatchTableRequest.Operation; import io.deephaven.proto.backplane.grpc.TableReference; @@ -29,46 +28,37 @@ public abstract class AjRajGrpcImpl extends GrpcTableOperation joinMatchParser; private AjRajGrpcImpl( PermissionFunction permission, Function getRequest, - Function joinMatchParser, - UpdateGraphProcessor updateGraphProcessor) { + Function joinMatchParser) { super( permission, getRequest, AjRajTablesRequest::getResultId, AjRajGrpcImpl::refs); this.joinMatchParser = 
Objects.requireNonNull(joinMatchParser); - this.updateGraphProcessor = Objects.requireNonNull(updateGraphProcessor); } @Override @@ -107,7 +97,10 @@ public Table create(AjRajTablesRequest request, List> source } private SafeCloseable lock(Table left, Table right) { - return left.isRefreshing() || right.isRefreshing() ? updateGraphProcessor.sharedLock().lockCloseable() : null; + if (left.isRefreshing() || right.isRefreshing()) { + return left.getUpdateGraph(right).sharedLock().lockCloseable(); + } + return null; } private static List refs(AjRajTablesRequest request) { diff --git a/server/src/main/java/io/deephaven/server/table/ops/ApplyPreviewColumnsGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/ApplyPreviewColumnsGrpcImpl.java index 9a7acbf7052..000ba05560a 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/ApplyPreviewColumnsGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/ApplyPreviewColumnsGrpcImpl.java @@ -27,6 +27,7 @@ protected ApplyPreviewColumnsGrpcImpl(final TableServiceContextualAuthWiring aut public Table create(final ApplyPreviewColumnsRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); - return ColumnPreviewManager.applyPreview(sourceTables.get(0).get()); + final Table source = sourceTables.get(0).get(); + return ColumnPreviewManager.applyPreview(source); } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/ComboAggregateGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/ComboAggregateGrpcImpl.java index 94802340df5..fd74028d507 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/ComboAggregateGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/ComboAggregateGrpcImpl.java @@ -117,14 +117,12 @@ public Table create(final ComboAggregateRequest request, .stream() .map(ColumnName::of) .toArray(ColumnName[]::new); - final Table result; if (isSimpleAggregation(request)) { // This is a special 
case with a special operator that can be invoked right off of the table api. - result = singleAggregateHelper(parent, groupByColumns, request.getAggregates(0)); + return singleAggregateHelper(parent, groupByColumns, request.getAggregates(0)); } else { - result = comboAggregateHelper(parent, groupByColumns, request.getAggregatesList()); + return comboAggregateHelper(parent, groupByColumns, request.getAggregatesList()); } - return result; } private static Table singleAggregateHelper(final Table parent, final ColumnName[] groupByColumns, diff --git a/server/src/main/java/io/deephaven/server/table/ops/DropColumnsGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/DropColumnsGrpcImpl.java index a532c89b356..1d0ec203185 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/DropColumnsGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/DropColumnsGrpcImpl.java @@ -27,6 +27,7 @@ public DropColumnsGrpcImpl(final TableServiceContextualAuthWiring authWiring) { public Table create(final DropColumnsRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); - return sourceTables.get(0).get().dropColumns(request.getColumnNamesList()); + final Table source = sourceTables.get(0).get(); + return source.dropColumns(request.getColumnNamesList()); } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailByGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailByGrpcImpl.java index 5436de9ceb5..ed187bac668 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailByGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailByGrpcImpl.java @@ -10,12 +10,13 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.select.SelectColumn; import io.deephaven.engine.table.impl.select.SelectColumnFactory; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import 
io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.proto.backplane.grpc.BatchTableRequest; import io.deephaven.proto.backplane.grpc.HeadOrTailByRequest; import io.deephaven.proto.util.Exceptions; import io.deephaven.server.session.SessionState; import io.deephaven.server.table.validation.ColumnExpressionValidator; +import io.deephaven.util.SafeCloseable; import io.grpc.StatusRuntimeException; import javax.inject.Inject; @@ -30,16 +31,13 @@ protected interface RealTableOperation { } private final RealTableOperation realTableOperation; - private final UpdateGraphProcessor updateGraphProcessor; protected HeadOrTailByGrpcImpl( final PermissionFunction permission, final Function getRequest, - final RealTableOperation realTableOperation, - final UpdateGraphProcessor updateGraphProcessor) { + final RealTableOperation realTableOperation) { super(permission, getRequest, HeadOrTailByRequest::getResultId, HeadOrTailByRequest::getSourceId); this.realTableOperation = realTableOperation; - this.updateGraphProcessor = updateGraphProcessor; } @Override @@ -64,31 +62,34 @@ public Table create(final HeadOrTailByRequest request, // overloads that take SelectColumn arrays throw UnsupportedOperationException, but we validate anyway ColumnExpressionValidator.validateColumnExpressions(expressions, columnSpecs, parent); - // note that headBy/tailBy use ungroup which currently requires the UGP lock - return updateGraphProcessor.sharedLock() - .computeLocked(() -> realTableOperation.apply(parent, request.getNumRows(), columnSpecs)); + try (final SafeCloseable ignored = lock(parent)) { + return realTableOperation.apply(parent, request.getNumRows(), columnSpecs); + } + } + + private SafeCloseable lock(Table parent) { + if (parent.isRefreshing()) { + UpdateGraph updateGraph = parent.getUpdateGraph(); + return updateGraph.sharedLock().lockCloseable(); + } else { + return null; + } } @Singleton public static class HeadByGrpcImpl extends HeadOrTailByGrpcImpl { @Inject - public 
HeadByGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { - super(authWiring::checkPermissionHeadBy, BatchTableRequest.Operation::getHeadBy, Table::headBy, - updateGraphProcessor); + public HeadByGrpcImpl(final TableServiceContextualAuthWiring authWiring) { + super(authWiring::checkPermissionHeadBy, BatchTableRequest.Operation::getHeadBy, Table::headBy); } } @Singleton public static class TailByGrpcImpl extends HeadOrTailByGrpcImpl { @Inject - public TailByGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { - super(authWiring::checkPermissionTailBy, BatchTableRequest.Operation::getTailBy, Table::tailBy, - updateGraphProcessor); + public TailByGrpcImpl(final TableServiceContextualAuthWiring authWiring) { + super(authWiring::checkPermissionTailBy, BatchTableRequest.Operation::getTailBy, Table::tailBy); } } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailGrpcImpl.java index 91c0a59e083..7a8c0ab22ec 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/HeadOrTailGrpcImpl.java @@ -47,7 +47,8 @@ public void validateRequest(final HeadOrTailRequest request) throws StatusRuntim public Table create(final HeadOrTailRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); - return realTableOperation.apply(sourceTables.get(0).get(), request.getNumRows()); + final Table source = sourceTables.get(0).get(); + return realTableOperation.apply(source, request.getNumRows()); } @Singleton diff --git a/server/src/main/java/io/deephaven/server/table/ops/JoinTablesGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/JoinTablesGrpcImpl.java index 178bcc2056e..00d23904697 100644 --- 
a/server/src/main/java/io/deephaven/server/table/ops/JoinTablesGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/JoinTablesGrpcImpl.java @@ -12,7 +12,6 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.MatchPair; import io.deephaven.engine.table.impl.select.MatchPairFactory; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.AsOfJoinTablesRequest; import io.deephaven.proto.backplane.grpc.AsOfJoinTablesRequest.MatchRule; import io.deephaven.proto.backplane.grpc.BatchTableRequest; @@ -39,10 +38,9 @@ protected interface RealTableOperation { private final Function> getColMatchList; private final Function> getColAddList; - private final UpdateGraphProcessor updateGraphProcessor; private final RealTableOperation realTableOperation; - protected JoinTablesGrpcImpl(final UpdateGraphProcessor updateGraphProcessor, + protected JoinTablesGrpcImpl( final PermissionFunction permission, final Function getRequest, final Function getTicket, @@ -51,7 +49,6 @@ protected JoinTablesGrpcImpl(final UpdateGraphProcessor updateGraphProcessor, final Function> getColAddList, final RealTableOperation realTableOperation) { super(permission, getRequest, getTicket, getDependencies); - this.updateGraphProcessor = updateGraphProcessor; this.getColMatchList = getColMatchList; this.getColAddList = getColAddList; this.realTableOperation = realTableOperation; @@ -90,8 +87,8 @@ public Table create(final T request, final List if (!lhs.isRefreshing() && !rhs.isRefreshing()) { result = realTableOperation.apply(lhs, rhs, columnsToMatch, columnsToAdd, request); } else { - result = updateGraphProcessor.sharedLock().computeLocked( - () -> realTableOperation.apply(lhs, rhs, columnsToMatch, columnsToAdd, request)); + result = lhs.getUpdateGraph(rhs).sharedLock() + .computeLocked(() -> realTableOperation.apply(lhs, rhs, columnsToMatch, columnsToAdd, request)); } return result; } @@ -104,10 +101,8 @@ public 
static class AsOfJoinTablesGrpcImpl extends JoinTablesGrpcImpl List.of(request.getLeftId(), request.getRightId()); @Inject - protected AsOfJoinTablesGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { - super(updateGraphProcessor, authWiring::checkPermissionAsOfJoinTables, + protected AsOfJoinTablesGrpcImpl(final TableServiceContextualAuthWiring authWiring) { + super(authWiring::checkPermissionAsOfJoinTables, BatchTableRequest.Operation::getAsOfJoin, AsOfJoinTablesRequest::getResultId, EXTRACT_DEPS, AsOfJoinTablesRequest::getColumnsToMatchList, AsOfJoinTablesRequest::getColumnsToAddList, @@ -159,10 +154,8 @@ public static class CrossJoinTablesGrpcImpl extends JoinTablesGrpcImpl List.of(request.getLeftId(), request.getRightId()); @Inject - public CrossJoinTablesGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { - super(updateGraphProcessor, authWiring::checkPermissionCrossJoinTables, + public CrossJoinTablesGrpcImpl(final TableServiceContextualAuthWiring authWiring) { + super(authWiring::checkPermissionCrossJoinTables, BatchTableRequest.Operation::getCrossJoin, CrossJoinTablesRequest::getResultId, EXTRACT_DEPS, CrossJoinTablesRequest::getColumnsToMatchList, CrossJoinTablesRequest::getColumnsToAddList, @@ -190,10 +183,8 @@ public static class ExactJoinTablesGrpcImpl extends JoinTablesGrpcImpl List.of(request.getLeftId(), request.getRightId()); @Inject - public ExactJoinTablesGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { - super(updateGraphProcessor, authWiring::checkPermissionExactJoinTables, + public ExactJoinTablesGrpcImpl(final TableServiceContextualAuthWiring authWiring) { + super(authWiring::checkPermissionExactJoinTables, BatchTableRequest.Operation::getExactJoin, ExactJoinTablesRequest::getResultId, EXTRACT_DEPS, ExactJoinTablesRequest::getColumnsToMatchList, 
ExactJoinTablesRequest::getColumnsToAddList, @@ -214,10 +205,8 @@ public static class LeftJoinTablesGrpcImpl extends JoinTablesGrpcImpl List.of(request.getLeftId(), request.getRightId()); @Inject - public LeftJoinTablesGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { - super(updateGraphProcessor, authWiring::checkPermissionLeftJoinTables, + public LeftJoinTablesGrpcImpl(final TableServiceContextualAuthWiring authWiring) { + super(authWiring::checkPermissionLeftJoinTables, BatchTableRequest.Operation::getLeftJoin, LeftJoinTablesRequest::getResultId, EXTRACT_DEPS, LeftJoinTablesRequest::getColumnsToMatchList, LeftJoinTablesRequest::getColumnsToAddList, @@ -238,10 +227,8 @@ public static class NaturalJoinTablesGrpcImpl extends JoinTablesGrpcImpl List.of(request.getLeftId(), request.getRightId()); @Inject - public NaturalJoinTablesGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { - super(updateGraphProcessor, authWiring::checkPermissionNaturalJoinTables, + public NaturalJoinTablesGrpcImpl(final TableServiceContextualAuthWiring authWiring) { + super(authWiring::checkPermissionNaturalJoinTables, BatchTableRequest.Operation::getNaturalJoin, NaturalJoinTablesRequest::getResultId, EXTRACT_DEPS, NaturalJoinTablesRequest::getColumnsToMatchList, NaturalJoinTablesRequest::getColumnsToAddList, diff --git a/server/src/main/java/io/deephaven/server/table/ops/MergeTablesGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/MergeTablesGrpcImpl.java index 9bd9997dcdb..9e55a600a05 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/MergeTablesGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/MergeTablesGrpcImpl.java @@ -7,12 +7,13 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.util.TableTools; import io.deephaven.proto.backplane.grpc.BatchTableRequest; import io.deephaven.proto.backplane.grpc.MergeTablesRequest; import io.deephaven.proto.util.Exceptions; import io.deephaven.server.session.SessionState; +import io.deephaven.util.SafeCloseable; import io.grpc.StatusRuntimeException; import javax.inject.Inject; @@ -23,15 +24,11 @@ @Singleton public class MergeTablesGrpcImpl extends GrpcTableOperation { - private final UpdateGraphProcessor updateGraphProcessor; - @Inject public MergeTablesGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { + final TableServiceContextualAuthWiring authWiring) { super(authWiring::checkPermissionMergeTables, BatchTableRequest.Operation::getMerge, MergeTablesRequest::getResultId, MergeTablesRequest::getSourceIdsList); - this.updateGraphProcessor = updateGraphProcessor; } @Override @@ -51,16 +48,18 @@ public Table create(final MergeTablesRequest request, .map(SessionState.ExportObject::get) .collect(Collectors.toList()); - Table result; if (tables.stream().noneMatch(Table::isRefreshing)) { - result = keyColumn.isEmpty() ? TableTools.merge(tables) : TableTools.mergeSorted(keyColumn, tables); + return keyColumn.isEmpty() ? 
TableTools.merge(tables) : TableTools.mergeSorted(keyColumn, tables); } else { - result = updateGraphProcessor.sharedLock().computeLocked(() -> TableTools.merge(tables)); + final UpdateGraph updateGraph = tables.get(0).getUpdateGraph(tables.toArray(Table[]::new)); + Table result; + try (final SafeCloseable ignored = updateGraph.sharedLock().lockCloseable()) { + result = TableTools.merge(tables); + } if (!keyColumn.isEmpty()) { result = result.sort(keyColumn); } + return result; } - - return result; } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/RangeJoinGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/RangeJoinGrpcImpl.java index ed5806a4b0b..354d184f65b 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/RangeJoinGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/RangeJoinGrpcImpl.java @@ -9,7 +9,6 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.Aggregation; import io.deephaven.proto.backplane.grpc.BatchTableRequest; import io.deephaven.proto.backplane.grpc.RangeJoinTablesRequest; @@ -30,18 +29,14 @@ @Singleton public final class RangeJoinGrpcImpl extends GrpcTableOperation { - private final UpdateGraphProcessor updateGraphProcessor; - @Inject public RangeJoinGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { + final TableServiceContextualAuthWiring authWiring) { super( authWiring::checkPermissionRangeJoinTables, BatchTableRequest.Operation::getRangeJoin, RangeJoinTablesRequest::getResultId, RangeJoinGrpcImpl::refs); - this.updateGraphProcessor = updateGraphProcessor; } private static List refs(RangeJoinTablesRequest request) { @@ -96,7 +91,7 @@ public Table create(RangeJoinTablesRequest request, List> so if 
(!leftTable.isRefreshing() && !rightTable.isRefreshing()) { return leftTable.rangeJoin(rightTable, exactMatches, rangeMatch, aggregations); } else { - return updateGraphProcessor.sharedLock().computeLocked( + return leftTable.getUpdateGraph(rightTable).sharedLock().computeLocked( () -> leftTable.rangeJoin(rightTable, exactMatches, rangeMatch, aggregations)); } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/SnapshotTableGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/SnapshotTableGrpcImpl.java index a659d8c5c4a..ff039f689a1 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/SnapshotTableGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/SnapshotTableGrpcImpl.java @@ -6,7 +6,6 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.BatchTableRequest.Operation; import io.deephaven.proto.backplane.grpc.SnapshotTableRequest; import io.deephaven.server.grpc.Common; @@ -18,23 +17,17 @@ import javax.inject.Inject; import javax.inject.Singleton; import java.util.List; -import java.util.Objects; @Singleton public final class SnapshotTableGrpcImpl extends GrpcTableOperation { - private final UpdateGraphProcessor updateGraphProcessor; - @Inject - public SnapshotTableGrpcImpl( - final TableServiceContextualAuthWiring auth, - final UpdateGraphProcessor updateGraphProcessor) { + public SnapshotTableGrpcImpl(final TableServiceContextualAuthWiring auth) { super( auth::checkPermissionSnapshot, Operation::getSnapshot, SnapshotTableRequest::getResultId, SnapshotTableRequest::getSourceId); - this.updateGraphProcessor = Objects.requireNonNull(updateGraphProcessor); } @Override @@ -50,12 +43,16 @@ public Table create( final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table base 
= sourceTables.get(0).get(); - try (final SafeCloseable _lock = lock(base)) { + try (final SafeCloseable ignored = lock(base)) { return base.snapshot(); } } private SafeCloseable lock(Table base) { - return base.isRefreshing() ? updateGraphProcessor.sharedLock().lockCloseable() : null; + if (base.isRefreshing()) { + return base.getUpdateGraph().sharedLock().lockCloseable(); + } else { + return null; + } } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/SnapshotWhenTableGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/SnapshotWhenTableGrpcImpl.java index 7cbb18fcba9..5e2cbda7a42 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/SnapshotWhenTableGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/SnapshotWhenTableGrpcImpl.java @@ -11,7 +11,7 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.proto.backplane.grpc.BatchTableRequest.Operation; import io.deephaven.proto.backplane.grpc.SnapshotWhenTableRequest; import io.deephaven.proto.backplane.grpc.TableReference; @@ -26,7 +26,6 @@ import javax.inject.Singleton; import java.util.Arrays; import java.util.List; -import java.util.Objects; @Singleton public final class SnapshotWhenTableGrpcImpl extends GrpcTableOperation { @@ -52,18 +51,13 @@ private static SnapshotWhenOptions options(SnapshotWhenTableRequest request) { return builder.build(); } - private final UpdateGraphProcessor updateGraphProcessor; - @Inject - public SnapshotWhenTableGrpcImpl( - final TableServiceContextualAuthWiring auth, - final UpdateGraphProcessor updateGraphProcessor) { + public SnapshotWhenTableGrpcImpl(final TableServiceContextualAuthWiring auth) { super( auth::checkPermissionSnapshotWhen, Operation::getSnapshotWhen, 
SnapshotWhenTableRequest::getResultId, SnapshotWhenTableGrpcImpl::refs); - this.updateGraphProcessor = Objects.requireNonNull(updateGraphProcessor); } @Override @@ -88,12 +82,19 @@ public Table create( final Table base = sourceTables.get(0).get(); final Table trigger = sourceTables.get(1).get(); final SnapshotWhenOptions options = options(request); - try (final SafeCloseable _lock = lock(base, trigger)) { + try (final SafeCloseable ignored = lock(base, trigger)) { return base.snapshotWhen(trigger, options); } } private SafeCloseable lock(Table base, Table trigger) { - return base.isRefreshing() || trigger.isRefreshing() ? updateGraphProcessor.sharedLock().lockCloseable() : null; + if (base.isRefreshing()) { + UpdateGraph updateGraph = base.getUpdateGraph(); + return updateGraph.sharedLock().lockCloseable(); + } else if (trigger.isRefreshing()) { + UpdateGraph updateGraph = trigger.getUpdateGraph(); + return updateGraph.sharedLock().lockCloseable(); + } + return null; } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/TimeTableGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/TimeTableGrpcImpl.java index d12dcbfb646..41637f389ad 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/TimeTableGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/TimeTableGrpcImpl.java @@ -6,9 +6,9 @@ import com.google.rpc.Code; import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.TimeTable; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.BatchTableRequest; import io.deephaven.proto.backplane.grpc.TimeTableRequest; import io.deephaven.proto.util.Exceptions; @@ -25,17 +25,14 @@ public class TimeTableGrpcImpl extends GrpcTableOperation { private final Scheduler scheduler; - private 
final UpdateGraphProcessor updateGraphProcessor; @Inject() public TimeTableGrpcImpl( final TableServiceContextualAuthWiring authWiring, - final Scheduler scheduler, - final UpdateGraphProcessor updateGraphProcessor) { + final Scheduler scheduler) { super(authWiring::checkPermissionTimeTable, BatchTableRequest.Operation::getTimeTable, TimeTableRequest::getResultId); this.scheduler = scheduler; - this.updateGraphProcessor = updateGraphProcessor; } @Override @@ -53,7 +50,7 @@ public Table create(final TimeTableRequest request, Assert.eq(sourceTables.size(), "sourceTables.size()", 0); final long startTime = request.getStartTimeNanos(); final long periodValue = request.getPeriodNanos(); - return new TimeTable(updateGraphProcessor, scheduler, + return new TimeTable(ExecutionContext.getContext().getUpdateGraph(), scheduler, startTime <= 0 ? null : DateTimeUtils.epochNanosToInstant(startTime), periodValue, false); } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/UngroupGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/UngroupGrpcImpl.java index a975c8d7c95..3eb5f25ebfd 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/UngroupGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/UngroupGrpcImpl.java @@ -7,10 +7,10 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.BatchTableRequest; import io.deephaven.proto.backplane.grpc.UngroupRequest; import io.deephaven.server.session.SessionState; +import io.deephaven.util.SafeCloseable; import javax.inject.Inject; import javax.inject.Singleton; @@ -20,15 +20,10 @@ @Singleton public class UngroupGrpcImpl extends GrpcTableOperation { - private final UpdateGraphProcessor updateGraphProcessor; - @Inject - public UngroupGrpcImpl( - final TableServiceContextualAuthWiring 
authWiring, - final UpdateGraphProcessor updateGraphProcessor) { + public UngroupGrpcImpl(final TableServiceContextualAuthWiring authWiring) { super(authWiring::checkPermissionUngroup, BatchTableRequest.Operation::getUngroup, UngroupRequest::getResultId, UngroupRequest::getSourceId); - this.updateGraphProcessor = updateGraphProcessor; } @Override @@ -40,7 +35,16 @@ public Table create(final UngroupRequest request, .stream() .map(ColumnName::of) .collect(Collectors.toList()); - return updateGraphProcessor.sharedLock() - .computeLocked(() -> parent.ungroup(request.getNullFill(), columnsToUngroup)); + try (final SafeCloseable ignored = lock(parent)) { + return parent.ungroup(request.getNullFill(), columnsToUngroup); + } + } + + private static SafeCloseable lock(Table base) { + if (base.isRefreshing()) { + return base.getUpdateGraph().sharedLock().lockCloseable(); + } else { + return null; + } } } diff --git a/server/src/main/java/io/deephaven/server/table/ops/UpdateByGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/UpdateByGrpcImpl.java index 4dbe016c599..b92103989ec 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/UpdateByGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/UpdateByGrpcImpl.java @@ -10,8 +10,9 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.*; +import io.deephaven.proto.backplane.grpc.BatchTableRequest; +import io.deephaven.proto.backplane.grpc.UpdateByRequest; import io.deephaven.proto.backplane.grpc.UpdateByRequest.UpdateByOperation.UpdateByColumn; import io.deephaven.proto.backplane.grpc.UpdateByRequest.UpdateByOperation.UpdateByColumn.UpdateBySpec.*; import io.deephaven.proto.backplane.grpc.UpdateByRequest.UpdateByOptions; @@ -71,7 +72,7 @@ public Table create(final UpdateByRequest request, 
request.getGroupByColumnsList().stream().map(ColumnName::of).collect(Collectors.toList()); if (parent.isRefreshing()) { - return UpdateGraphProcessor.DEFAULT.sharedLock().computeLocked(() -> control == null + return parent.getUpdateGraph().sharedLock().computeLocked(() -> control == null ? parent.updateBy(operations, groupByColumns) : parent.updateBy(control, operations, groupByColumns)); } diff --git a/server/src/main/java/io/deephaven/server/table/ops/UpdateOrSelectGrpcImpl.java b/server/src/main/java/io/deephaven/server/table/ops/UpdateOrSelectGrpcImpl.java index 070a1612aad..2d389784f46 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/UpdateOrSelectGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/UpdateOrSelectGrpcImpl.java @@ -10,7 +10,7 @@ import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.select.SelectColumn; import io.deephaven.engine.table.impl.select.SelectColumnFactory; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.proto.backplane.grpc.BatchTableRequest; import io.deephaven.proto.backplane.grpc.SelectOrUpdateRequest; import io.deephaven.server.session.SessionState; @@ -53,8 +53,9 @@ public Table create(final SelectOrUpdateRequest request, ColumnExpressionValidator.validateColumnExpressions(expressions, columnSpecs, parent); if (parent.isRefreshing() && requiresSharedLock) { - return UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> realTableOperation.apply(parent, Arrays.asList(expressions))); + final UpdateGraph updateGraph = parent.getUpdateGraph(); + return updateGraph.sharedLock().computeLocked( + () -> realTableOperation.apply(parent, Arrays.asList(expressions))); } return realTableOperation.apply(parent, Arrays.asList(expressions)); diff --git a/server/src/main/java/io/deephaven/server/table/ops/WhereInGrpcImpl.java 
b/server/src/main/java/io/deephaven/server/table/ops/WhereInGrpcImpl.java index 36630c7a22c..33bd40b7869 100644 --- a/server/src/main/java/io/deephaven/server/table/ops/WhereInGrpcImpl.java +++ b/server/src/main/java/io/deephaven/server/table/ops/WhereInGrpcImpl.java @@ -7,7 +7,6 @@ import io.deephaven.auth.codegen.impl.TableServiceContextualAuthWiring; import io.deephaven.base.verify.Assert; import io.deephaven.engine.table.Table; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.proto.backplane.grpc.BatchTableRequest; import io.deephaven.proto.backplane.grpc.TableReference; import io.deephaven.proto.backplane.grpc.WhereInRequest; @@ -20,7 +19,6 @@ import javax.inject.Inject; import javax.inject.Singleton; import java.util.List; -import java.util.Objects; @Singleton public class WhereInGrpcImpl extends GrpcTableOperation { @@ -29,15 +27,10 @@ private static List refs(WhereInRequest request) { return List.of(request.getLeftId(), request.getRightId()); } - private final UpdateGraphProcessor updateGraphProcessor; - @Inject - public WhereInGrpcImpl( - final TableServiceContextualAuthWiring authWiring, - final UpdateGraphProcessor updateGraphProcessor) { + public WhereInGrpcImpl(final TableServiceContextualAuthWiring authWiring) { super(authWiring::checkPermissionWhereIn, BatchTableRequest.Operation::getWhereIn, WhereInRequest::getResultId, WhereInGrpcImpl::refs); - this.updateGraphProcessor = Objects.requireNonNull(updateGraphProcessor); } @Override @@ -57,12 +50,17 @@ public final Table create(final WhereInRequest request, final List columnsToMatch = JoinMatch.from(request.getColumnsToMatchList()); - try (final SafeCloseable _lock = lock(left, right)) { + try (final SafeCloseable ignored = lock(left, right)) { return request.getInverted() ? left.whereNotIn(right, columnsToMatch) : left.whereIn(right, columnsToMatch); } } private SafeCloseable lock(Table left, Table right) { - return left.isRefreshing() || right.isRefreshing() ? 
updateGraphProcessor.sharedLock().lockCloseable() : null; + if (left.isRefreshing()) { + return left.getUpdateGraph().sharedLock().lockCloseable(); + } else if (right.isRefreshing()) { + return right.getUpdateGraph().sharedLock().lockCloseable(); + } + return null; } } diff --git a/server/src/test/java/io/deephaven/server/appmode/ApplicationServiceGrpcImplTest.java b/server/src/test/java/io/deephaven/server/appmode/ApplicationServiceGrpcImplTest.java index d983846b0df..1b6178f23b0 100644 --- a/server/src/test/java/io/deephaven/server/appmode/ApplicationServiceGrpcImplTest.java +++ b/server/src/test/java/io/deephaven/server/appmode/ApplicationServiceGrpcImplTest.java @@ -3,8 +3,9 @@ */ package io.deephaven.server.appmode; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.TestExecutionContext; -import io.deephaven.engine.liveness.LivenessScopeStack; +import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.NoLanguageDeephavenSession; import io.deephaven.engine.util.ScriptSession; import io.deephaven.plugin.type.ObjectTypeLookup; @@ -15,12 +16,12 @@ import io.deephaven.server.session.SessionServiceGrpcImpl; import io.deephaven.server.session.SessionState; import io.deephaven.server.util.TestControlledScheduler; -import io.deephaven.util.SafeCloseable; import io.deephaven.auth.AuthContext; import io.grpc.Context; import io.grpc.stub.StreamObserver; import org.junit.After; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; import java.util.Collections; @@ -29,17 +30,19 @@ import static org.junit.Assert.assertEquals; public class ApplicationServiceGrpcImplTest { + + @Rule + public final EngineCleanup framework = new EngineCleanup(); + private static final long TOKEN_EXPIRE_MS = 1_000_000; private static final AuthContext AUTH_CONTEXT = new AuthContext.SuperUser(); - private SafeCloseable livenessScope; private TestControlledScheduler scheduler; private SessionService 
sessionService; private ApplicationServiceGrpcImpl applicationServiceGrpcImpl; @Before public void setup() { - livenessScope = LivenessScopeStack.open(); scheduler = new TestControlledScheduler(); sessionService = new SessionService(scheduler, authContext -> new SessionState(scheduler, TestExecutionContext::createForUnitTests, authContext), @@ -50,11 +53,8 @@ public void setup() { @After public void teardown() { - livenessScope.close(); - scheduler = null; sessionService = null; - livenessScope = null; } /** @@ -85,7 +85,8 @@ public void onListFieldsSubscribeFailedObserver() { faultyObserverShouldStillWork.set(false); // trigger a change - ScriptSession scriptSession = new NoLanguageDeephavenSession(); + ScriptSession scriptSession = new NoLanguageDeephavenSession( + ExecutionContext.getDefaultContext().getUpdateGraph()); scriptSession.setVariable("key", "hello world"); ScriptSession.Changes changes = new ScriptSession.Changes(); changes.created.put("key", "Object"); diff --git a/server/src/test/java/io/deephaven/server/appmode/ApplicationTest.java b/server/src/test/java/io/deephaven/server/appmode/ApplicationTest.java index 2a54def4cb2..ac17a47a807 100644 --- a/server/src/test/java/io/deephaven/server/appmode/ApplicationTest.java +++ b/server/src/test/java/io/deephaven/server/appmode/ApplicationTest.java @@ -5,6 +5,7 @@ import io.deephaven.appmode.ApplicationState; import io.deephaven.appmode.Field; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.table.Table; import io.deephaven.engine.testutil.junit4.EngineCleanup; import io.deephaven.engine.util.AbstractScriptSession; @@ -49,7 +50,9 @@ public void app00() { @Test public void app01() throws IOException { - session = new GroovyDeephavenSession(NoOp.INSTANCE, null, GroovyDeephavenSession.RunScripts.none()); + session = new GroovyDeephavenSession( + ExecutionContext.getContext().getUpdateGraph(), NoOp.INSTANCE, null, + GroovyDeephavenSession.RunScripts.none()); ApplicationState app = 
ApplicationFactory.create(ApplicationConfigs.testAppDir(), ApplicationConfigs.app01(), session, new NoopStateListener()); assertThat(app.name()).isEqualTo("My Groovy Application"); @@ -61,7 +64,9 @@ public void app01() throws IOException { @Test @Ignore("TODO: deephaven-core#1741 python test needs to run in a container") public void app02() throws IOException, InterruptedException, TimeoutException { - session = new PythonDeephavenSession(NoOp.INSTANCE, null, false, PythonEvaluatorJpy.withGlobalCopy()); + session = new PythonDeephavenSession( + ExecutionContext.getDefaultContext().getUpdateGraph(), NoOp.INSTANCE, null, false, + PythonEvaluatorJpy.withGlobalCopy()); ApplicationState app = ApplicationFactory.create(ApplicationConfigs.testAppDir(), ApplicationConfigs.app02(), session, new NoopStateListener()); assertThat(app.name()).isEqualTo("My Python Application"); diff --git a/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java b/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java index 1ef5a3b2dfe..722770f1798 100644 --- a/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java +++ b/server/src/test/java/io/deephaven/server/barrage/BarrageBlinkTableTest.java @@ -12,6 +12,7 @@ import io.deephaven.base.Pair; import io.deephaven.base.verify.Assert; import io.deephaven.client.impl.BarrageSubscriptionImpl.BarrageDataMarshaller; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; @@ -21,9 +22,9 @@ import io.deephaven.engine.table.impl.TableUpdateImpl; import io.deephaven.engine.table.impl.TableUpdateValidator; import io.deephaven.engine.table.impl.util.BarrageMessage; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.UpdateSourceCombiner; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; @@ -180,7 +181,8 @@ private class RemoteClient { final ByteString schemaBytes = BarrageUtil.schemaBytesFromTable(blinkTable); final Schema flatbufSchema = SchemaHelper.flatbufSchema(schemaBytes.asReadOnlyByteBuffer()); final BarrageUtil.ConvertedArrowSchema schema = BarrageUtil.convertArrowSchema(flatbufSchema); - this.barrageTable = BarrageTable.make(updateSourceCombiner, UpdateGraphProcessor.DEFAULT, + this.barrageTable = BarrageTable.make(updateSourceCombiner, + ExecutionContext.getContext().getUpdateGraph(), null, schema.tableDef, schema.attributes, viewport == null ? -1 : viewport.size()); final BarrageSubscriptionOptions options = BarrageSubscriptionOptions.builder() @@ -295,7 +297,8 @@ private void releaseBlinkRows(int numBatches) { } private void releaseBlinkRows() { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { final TableUpdateImpl update = new TableUpdateImpl(); final long lastKey = blinkRowSet.lastRowKey(); update.removed = blinkRowSet.copy(); @@ -313,7 +316,8 @@ public void testBasicBlinkSingleUpdates() { final RemoteClient client = new RemoteClient(); flushProducerTable(); // empty snapshot client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(0, 0); for (long nr = 0; nr < NUM_ROWS / BATCH_SIZE; ++nr) { @@ -321,7 +325,7 @@ public void testBasicBlinkSingleUpdates() { flushProducerTable(); client.flushEventsToReplicatedTable(); 
updateSourceCombiner.assertRefreshRequested(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(nr, nr + 1); } } @@ -330,7 +334,8 @@ public void testSenderAggregates() { final RemoteClient client = new RemoteClient(); flushProducerTable(); // empty snapshot client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(0, 0); for (long nr = 0; nr < NUM_ROWS / BATCH_SIZE / 4; ++nr) { @@ -338,7 +343,7 @@ public void testSenderAggregates() { flushProducerTable(); client.flushEventsToReplicatedTable(); updateSourceCombiner.assertRefreshRequested(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(4 * nr, 4 * (nr + 1)); } } @@ -347,7 +352,8 @@ public void testReceiverAggregates() { final RemoteClient client = new RemoteClient(); flushProducerTable(); // empty snapshot client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(0, 0); for (long nr = 0; nr < NUM_ROWS / BATCH_SIZE / 4; ++nr) { @@ -355,7 +361,7 @@ public void testReceiverAggregates() { flushProducerTable(); client.flushEventsToReplicatedTable(); updateSourceCombiner.assertRefreshRequested(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(4 * nr, 4 * (nr + 1)); } } 
@@ -366,7 +372,8 @@ public void testBMPFlushesOnSub() { final RemoteClient client1 = new RemoteClient(); flushProducerTable(); // empty snapshot + release of BATCH_SIZE client1.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client1.validateBatches(0, 1); releaseBlinkRows(); // get sent to client1 @@ -376,7 +383,7 @@ public void testBMPFlushesOnSub() { client2.flushEventsToReplicatedTable(); updateSourceCombiner.assertRefreshRequested(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client1.validateBatches(1, 3); // gets before and after snap client2.validateBatches(2, 3); // gets only after snap @@ -386,7 +393,8 @@ public void testReceiverFlushesEmptyCycle() { final RemoteClient client = new RemoteClient(); flushProducerTable(); // empty snapshot client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); updateSourceCombiner.assertRefreshRequested(); releaseBlinkRows(); @@ -394,11 +402,11 @@ public void testReceiverFlushesEmptyCycle() { Assert.eqFalse(updateSourceCombiner.refreshRequested, "refreshRequested"); client.flushEventsToReplicatedTable(); updateSourceCombiner.assertRefreshRequested(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(0, 1); updateSourceCombiner.assertRefreshRequested(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + 
updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(0, 0); } @@ -407,26 +415,27 @@ public void testViewport() { final RemoteClient client = new RemoteClient(RowSetFactory.fromRange(2 * BATCH_SIZE, 3 * BATCH_SIZE - 1), null); flushProducerTable(); // empty snapshot client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(0, 0); releaseBlinkRows(); flushProducerTable(); client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(0, 0); releaseBlinkRows(3); flushProducerTable(); client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(3, 4); for (long nr = 1; nr < NUM_ROWS / BATCH_SIZE / 4; ++nr) { releaseBlinkRows(4); flushProducerTable(); client.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client.validateBatches(4 * nr + 2, 4 * (nr + 1) - 1); } } @@ -438,7 +447,8 @@ public void testSimultaneousFullAndViewport() { flushProducerTable(); // empty snapshot client1.flushEventsToReplicatedTable(); client2.flushEventsToReplicatedTable(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client1.validate(sourceTable.getSubTable(RowSetFactory.empty().toTracking())); 
client2.validate(sourceTable.getSubTable(RowSetFactory.empty().toTracking())); @@ -448,7 +458,7 @@ public void testSimultaneousFullAndViewport() { client1.flushEventsToReplicatedTable(); client2.flushEventsToReplicatedTable(); updateSourceCombiner.assertRefreshRequested(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); client1.validateBatches(4 * nr, 4 * (nr + 1)); client2.validateBatches(4 * nr + 2, 4 * (nr + 1) - 1); } @@ -457,6 +467,10 @@ public void testSimultaneousFullAndViewport() { private static class SourceCombiner extends UpdateSourceCombiner { private boolean refreshRequested = false; + private SourceCombiner() { + super(ExecutionContext.getContext().getUpdateGraph()); + } + @Override public void requestRefresh() { refreshRequested = true; diff --git a/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java b/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java index e4bd9ccdc36..dbca5faf316 100644 --- a/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java +++ b/server/src/test/java/io/deephaven/server/barrage/BarrageMessageRoundTripTest.java @@ -10,6 +10,7 @@ import io.deephaven.base.Pair; import io.deephaven.base.verify.Assert; import io.deephaven.client.impl.BarrageSubscriptionImpl.BarrageDataMarshaller; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.rowset.*; import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.Table; @@ -19,13 +20,9 @@ import io.deephaven.engine.table.impl.TableUpdateImpl; import io.deephaven.engine.table.impl.TableUpdateValidator; import io.deephaven.engine.table.impl.util.BarrageMessage; -import io.deephaven.engine.testutil.ColumnInfo; -import io.deephaven.engine.testutil.EvalNuggetInterface; -import io.deephaven.engine.testutil.GenerateTableUpdates; -import 
io.deephaven.engine.testutil.TstUtils; +import io.deephaven.engine.testutil.*; import io.deephaven.engine.testutil.generator.*; import io.deephaven.engine.testutil.testcase.RefreshingTableTestCase; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.UpdateSourceCombiner; import io.deephaven.engine.util.TableDiff; import io.deephaven.engine.util.TableTools; @@ -91,7 +88,7 @@ interface Builder { @Override public void setUp() throws Exception { super.setUp(); - updateSourceCombiner = new UpdateSourceCombiner(); + updateSourceCombiner = new UpdateSourceCombiner(ExecutionContext.getContext().getUpdateGraph()); scheduler = new TestControlledScheduler(); exceptions = new ArrayDeque<>(); useDeephavenNulls = true; @@ -179,7 +176,8 @@ private class RemoteClient { this.name = name; this.barrageMessageProducer = barrageMessageProducer; - this.barrageTable = BarrageTable.make(updateSourceCombiner, UpdateGraphProcessor.DEFAULT, + this.barrageTable = BarrageTable.make(updateSourceCombiner, + ExecutionContext.getContext().getUpdateGraph(), null, barrageMessageProducer.getTableDefinition(), new HashMap<>(), viewport == null ? 
-1 : viewport.size()); @@ -461,7 +459,8 @@ void runTest(final Runnable simulateSourceStep) { for (final RemoteNugget nugget : nuggets) { nugget.flushClientEvents(); } - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); TstUtils.validate("", nuggetsToValidate); @@ -574,9 +573,9 @@ public void testAppendIncremental() { update.shifted = RowSetShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, - helper.random, helper.sourceTable, helper.columnInfo)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -610,9 +609,9 @@ public void testPrependIncremental() { } update.shifted = shifted.build(); - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, - helper.random, helper.sourceTable, helper.columnInfo)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -628,9 +627,12 @@ public void testPrependIncremental() { public void testRoundTripIncremental() { final Consumer runOne = helper -> { - helper.runTest(() -> UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( - () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - helper.size, helper.random, helper.sourceTable, helper.columnInfo))); + helper.runTest(() -> { + final ControlledUpdateGraph 
updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, + helper.size, helper.random, helper.sourceTable, helper.columnInfo)); + }); }; for (final int size : new int[] {10, 100, 1000}) { @@ -659,9 +661,9 @@ public void testAppendIncrementalSharedProducer() { update.shifted = RowSetShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, - helper.random, helper.sourceTable, helper.columnInfo)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -695,9 +697,9 @@ public void testPrependIncrementalSharedProducer() { } update.shifted = shifted.build(); - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, - helper.random, helper.sourceTable, helper.columnInfo)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -713,9 +715,12 @@ public void testPrependIncrementalSharedProducer() { public void testRoundTripIncrementalSharedProducer() { final Consumer runOne = helper -> { - helper.runTest(() -> UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle( - () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - helper.size, helper.random, helper.sourceTable, helper.columnInfo))); + helper.runTest(() -> { + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); 
+ updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, + helper.size, helper.random, helper.sourceTable, helper.columnInfo)); + }); }; for (final int size : new int[] {10, 100, 1000}) { @@ -746,9 +751,9 @@ void runTest() { for (int pt = 0; pt < numProducerCoalesce; ++pt) { maybeChangeSub(numSteps.intValue(), rt, pt); - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, size, random, sourceTable, columnInfo)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, size, random, sourceTable, columnInfo)); } // flush producer @@ -759,7 +764,8 @@ void runTest() { for (final RemoteNugget nugget : nuggets) { nugget.flushClientEvents(); } - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); TstUtils.validate("", nuggetsToValidate); } @@ -975,8 +981,11 @@ void createNuggetsForTableMaker(final Supplier

makeTable) { final RemoteNugget nugget = new RemoteNugget(makeTable) { @Override public void onGetSnapshot() { - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, size, + final ControlledUpdateGraph updateGraph = + ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle( + () -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, size, random, sourceTable, columnInfo)); } }; @@ -1083,7 +1092,9 @@ public void testUsePrevOnSnapshot() { final RemoteNugget remoteNugget = new RemoteNugget(() -> queryTable); final MutableObject remoteClient = new MutableObject<>(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + // flush producer in the middle of the cycle -- but we need a different thread to usePrev + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(queryTable, i(10, 12)); TstUtils.addToTable(queryTable, i(5, 7), col("intCol", 10, 12)); @@ -1119,7 +1130,7 @@ public void testUsePrevOnSnapshot() { // We expect two pending messages for our client: snapshot in prev and the shift update Assert.equals(remoteClient.getValue().commandQueue.size(), "remoteClient.getValue().commandQueue.size()", 2); remoteNugget.flushClientEvents(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); // validate remoteNugget.validate("post flush"); @@ -1140,14 +1151,15 @@ public void testRegressModificationsInPrevView() { // Obtain snapshot of original viewport. 
flushProducerTable(); remoteNugget.flushClientEvents(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); remoteNugget.validate("original viewport"); // Change viewport without overlap. remoteClient.setViewport(RowSetFactory.fromRange(0, 1)); // Modify row that is outside of new viewport but in original. - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(queryTable, i(12), col("intCol", 13)); queryTable.notifyListeners(new TableUpdateImpl( @@ -1162,7 +1174,7 @@ public void testRegressModificationsInPrevView() { flushProducerTable(); // Add rows to shift modified row into new viewport. - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.removeRows(queryTable, i(5)); queryTable.notifyListeners(new TableUpdateImpl( @@ -1177,7 +1189,7 @@ public void testRegressModificationsInPrevView() { Assert.equals(remoteClient.commandQueue.size(), "remoteClient.getValue().commandQueue.size()", 3); // mod, add, // snaphot remoteNugget.flushClientEvents(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); remoteNugget.validate("new viewport with modification"); } @@ -1204,7 +1216,8 @@ public void testCoalescingLargeUpdates() { // Obtain snapshot of original viewport. 
flushProducerTable(); remoteNugget.flushClientEvents(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); remoteNugget.validate("original viewport"); // Add all of our new rows spread over multiple deltas. @@ -1217,7 +1230,7 @@ public void testCoalescingLargeUpdates() { values[jj / numDeltas] = ii; } final RowSet newRows = newRowsBuilder.build(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(queryTable, newRows, col("intCol", values)); queryTable.notifyListeners(new TableUpdateImpl( newRows, @@ -1230,7 +1243,7 @@ public void testCoalescingLargeUpdates() { // Coalesce these to ensure mappings larger than a single chunk are handled correctly. flushProducerTable(); remoteNugget.flushClientEvents(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); remoteNugget.validate("large add rows update"); // Modify all of our rows spread over multiple deltas. @@ -1242,7 +1255,7 @@ public void testCoalescingLargeUpdates() { values[jj / numDeltas] = numDeltas + ii; } final RowSet modRows = modRowsBuilder.build(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { TstUtils.addToTable(queryTable, modRows, col("intCol", values)); queryTable.notifyListeners(new TableUpdateImpl( RowSetFactory.empty(), @@ -1255,7 +1268,7 @@ public void testCoalescingLargeUpdates() { // Coalesce these to ensure mappings larger than a single chunk are handled correctly. 
flushProducerTable(); remoteNugget.flushClientEvents(); - UpdateGraphProcessor.DEFAULT.runWithinUnitTestCycle(updateSourceCombiner::run); + updateGraph.runWithinUnitTestCycle(updateSourceCombiner::run); remoteNugget.validate("large mod rows update"); } @@ -1315,9 +1328,9 @@ public void createTable() { update.shifted = RowSetShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, - helper.random, helper.sourceTable, helper.columnInfo)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + helper.random, helper.sourceTable, helper.columnInfo)); }); } } @@ -1379,9 +1392,9 @@ public void createTable() { update.shifted = RowSetShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - UpdateGraphProcessor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, - helper.random, helper.sourceTable, helper.columnInfo)); + final ControlledUpdateGraph updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + helper.random, helper.sourceTable, helper.columnInfo)); }); } } diff --git a/server/src/test/java/io/deephaven/server/runner/DeephavenApiServerTestBase.java b/server/src/test/java/io/deephaven/server/runner/DeephavenApiServerTestBase.java index edcf72c1824..9700c00b29f 100644 --- a/server/src/test/java/io/deephaven/server/runner/DeephavenApiServerTestBase.java +++ b/server/src/test/java/io/deephaven/server/runner/DeephavenApiServerTestBase.java @@ -6,9 +6,10 @@ import dagger.BindsInstance; import dagger.Component; import io.deephaven.client.ClientDefaultsModule; +import io.deephaven.engine.context.TestExecutionContext; import 
io.deephaven.engine.liveness.LivenessScope; import io.deephaven.engine.liveness.LivenessScopeStack; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import io.deephaven.io.logger.LogBuffer; import io.deephaven.io.logger.LogBufferGlobal; import io.deephaven.proto.DeephavenChannel; @@ -30,6 +31,7 @@ import javax.inject.Singleton; import java.io.PrintStream; import java.time.Duration; +import java.util.Optional; import java.util.concurrent.TimeUnit; /** @@ -74,6 +76,8 @@ interface Builder { @Rule public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); + private SafeCloseable executionContext; + private TestComponent serverComponent; private LogBuffer logBuffer; private DeephavenApiServer server; @@ -81,12 +85,12 @@ interface Builder { @Before public void setUp() throws Exception { - if (UpdateGraphProcessor.DEFAULT.isUnitTestModeAllowed()) { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); - } - logBuffer = new LogBuffer(128); + { + // Prevent previous failures from cascading + final Optional maybeOldLogBuffer = LogBufferGlobal.getInstance(); + maybeOldLogBuffer.ifPresent(LogBufferGlobal::clear); + } LogBufferGlobal.setInstance(logBuffer); final DeephavenApiServerTestConfig config = DeephavenApiServerTestConfig.builder() @@ -103,6 +107,14 @@ public void setUp() throws Exception { .build(); server = serverComponent.getServer(); + + final PeriodicUpdateGraph updateGraph = server.getUpdateGraph().cast(); + executionContext = TestExecutionContext.createForUnitTests().withUpdateGraph(updateGraph).open(); + if (updateGraph.isUnitTestModeAllowed()) { + updateGraph.enableUnitTestMode(); + updateGraph.resetForUnitTests(false); + } + server.startForUnitTests(); scopeCloseable = LivenessScopeStack.open(new LivenessScope(true), true); @@ -119,9 +131,11 @@ public void tearDown() throws Exception { LogBufferGlobal.clear(logBuffer); } 
- if (UpdateGraphProcessor.DEFAULT.isUnitTestModeAllowed()) { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + final PeriodicUpdateGraph updateGraph = server.getUpdateGraph().cast(); + if (updateGraph.isUnitTestModeAllowed()) { + updateGraph.resetForUnitTests(true); } + executionContext.close(); } public DeephavenApiServer server() { diff --git a/server/src/test/java/io/deephaven/server/runner/ExecutionContextUnitTestModule.java b/server/src/test/java/io/deephaven/server/runner/ExecutionContextUnitTestModule.java index e3e6e9e535e..9fbc36a5bda 100644 --- a/server/src/test/java/io/deephaven/server/runner/ExecutionContextUnitTestModule.java +++ b/server/src/test/java/io/deephaven/server/runner/ExecutionContextUnitTestModule.java @@ -4,6 +4,8 @@ import dagger.Provides; import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.TestExecutionContext; +import io.deephaven.engine.updategraph.UpdateGraph; +import io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph; import javax.inject.Singleton; @@ -12,6 +14,9 @@ public class ExecutionContextUnitTestModule { @Provides @Singleton public ExecutionContext provideExecutionContext() { - return TestExecutionContext.createForUnitTests(); + final UpdateGraph updateGraph = PeriodicUpdateGraph.newBuilder("TEST") + .numUpdateThreads(PeriodicUpdateGraph.NUM_THREADS_DEFAULT_UPDATE_GRAPH) + .existingOrBuild(); + return TestExecutionContext.createForUnitTests().withUpdateGraph(updateGraph); } } diff --git a/server/src/test/java/io/deephaven/server/session/SessionStateTest.java b/server/src/test/java/io/deephaven/server/session/SessionStateTest.java index e4ec8876add..0a279200246 100644 --- a/server/src/test/java/io/deephaven/server/session/SessionStateTest.java +++ b/server/src/test/java/io/deephaven/server/session/SessionStateTest.java @@ -22,10 +22,7 @@ import io.grpc.stub.StreamObserver; import org.apache.commons.lang3.mutable.MutableBoolean; import 
org.apache.commons.lang3.mutable.MutableObject; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.*; import java.lang.ref.WeakReference; import java.util.ArrayList; @@ -50,6 +47,7 @@ public class SessionStateTest { private static final AuthContext AUTH_CONTEXT = new AuthContext.SuperUser(); + private SafeCloseable executionContext; private LivenessScope livenessScope; private TestControlledScheduler scheduler; private SessionState session; @@ -57,6 +55,7 @@ public class SessionStateTest { @Before public void setup() { + executionContext = TestExecutionContext.createForUnitTests().open(); livenessScope = new LivenessScope(); LivenessScopeStack.push(livenessScope); scheduler = new TestControlledScheduler(); @@ -73,6 +72,7 @@ public void teardown() { livenessScope = null; scheduler = null; session = null; + executionContext.close(); } @Test diff --git a/server/src/test/java/io/deephaven/server/table/ExportTableUpdateListenerTest.java b/server/src/test/java/io/deephaven/server/table/ExportTableUpdateListenerTest.java index 004ee6e8ca1..1efa6878b07 100644 --- a/server/src/test/java/io/deephaven/server/table/ExportTableUpdateListenerTest.java +++ b/server/src/test/java/io/deephaven/server/table/ExportTableUpdateListenerTest.java @@ -6,6 +6,7 @@ import io.deephaven.UncheckedDeephavenException; import io.deephaven.auth.AuthContext; import io.deephaven.base.verify.Assert; +import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.context.TestExecutionContext; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.rowset.RowSetFactory; @@ -13,8 +14,8 @@ import io.deephaven.engine.table.ModifiedColumnSet; import io.deephaven.engine.table.impl.QueryTable; import io.deephaven.engine.table.impl.TableUpdateImpl; +import io.deephaven.engine.testutil.ControlledUpdateGraph; import io.deephaven.engine.testutil.TstUtils; -import 
io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.systemicmarking.SystemicObjectTracker; import io.deephaven.proto.backplane.grpc.ExportedTableUpdateMessage; import io.deephaven.proto.backplane.grpc.Ticket; @@ -40,15 +41,19 @@ public class ExportTableUpdateListenerTest { private static final AuthContext AUTH_CONTEXT = new AuthContext.SuperUser(); - private static final UpdateGraphProcessor updateGraphProcessor = UpdateGraphProcessor.DEFAULT; + private SafeCloseable executionContext; + private ControlledUpdateGraph updateGraph; + private TestControlledScheduler scheduler; private TestSessionState session; private QueuingResponseObserver observer; @Before public void setup() { - UpdateGraphProcessor.DEFAULT.enableUnitTestMode(); - UpdateGraphProcessor.DEFAULT.resetForUnitTests(false); + executionContext = TestExecutionContext.createForUnitTests().open(); + updateGraph = ExecutionContext.getContext().getUpdateGraph().cast(); + updateGraph.enableUnitTestMode(); + updateGraph.resetForUnitTests(false); SystemicObjectTracker.markThreadSystemic(); scheduler = new TestControlledScheduler(); @@ -58,11 +63,13 @@ public void setup() { @After public void tearDown() { - UpdateGraphProcessor.DEFAULT.resetForUnitTests(true); + updateGraph.resetForUnitTests(true); scheduler = null; session = null; observer = null; + + executionContext.close(); } @Test @@ -210,7 +217,7 @@ public void testPropagatesError() { // validate we receive an initial table size update expectSizes(t1.getExportId(), 42); - updateGraphProcessor.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { src.notifyListenersOnError(new RuntimeException("awful error occurred!"), null); }); @@ -278,7 +285,8 @@ public void testTableSizeUsesPrev() { expectNoMessage(); // export mid-tick - updateGraphProcessor.runWithinUnitTestCycle(() -> { + // Must be off-thread to use concurrent instantiation + updateGraph.runWithinUnitTestCycle(() -> { final TableUpdateImpl update 
= new TableUpdateImpl(); update.added = RowSetFactory.fromRange(src.getRowSet().lastRowKey() + 1, src.getRowSet().lastRowKey() + 42); update.removed = i(); @@ -308,7 +316,7 @@ public void testTableSizeUsesPrev() { } private void addRowsToSource(final QueryTable src, final long nRows) { - updateGraphProcessor.runWithinUnitTestCycle(() -> { + updateGraph.runWithinUnitTestCycle(() -> { final TableUpdateImpl update = new TableUpdateImpl(); update.added = RowSetFactory.fromRange(src.getRowSet().lastRowKey() + 1, src.getRowSet().lastRowKey() + nRows); @@ -332,8 +340,9 @@ private void expectSizes(final Ticket exportId, final long... sizes) { } private void expectNoMessage() { - updateGraphProcessor.runWithinUnitTestCycle(() -> { - }); // flush our terminal notification + // flush our terminal notification + updateGraph.runWithinUnitTestCycle(() -> { + }); final ExportedTableUpdateMessage batch = observer.msgQueue.poll(); Assert.eqNull(batch, "batch"); } diff --git a/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java b/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java index aebb96adc52..2d080c3ed74 100644 --- a/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java +++ b/server/test/src/main/java/io/deephaven/server/test/FlightMessageRoundTripTest.java @@ -26,8 +26,8 @@ import io.deephaven.engine.context.ExecutionContext; import io.deephaven.engine.liveness.LivenessScopeStack; import io.deephaven.engine.table.Table; +import io.deephaven.engine.updategraph.UpdateGraph; import io.deephaven.engine.table.impl.DataAccessHelpers; -import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.util.AbstractScriptSession; import io.deephaven.engine.util.NoLanguageDeephavenSession; import io.deephaven.engine.util.ScriptSession; @@ -113,8 +113,8 @@ TicketResolver ticketResolver(ScopeTicketResolver resolver) { @Singleton @Provides - AbstractScriptSession 
provideAbstractScriptSession() { - return new NoLanguageDeephavenSession("non-script-session"); + AbstractScriptSession provideAbstractScriptSession(final UpdateGraph updateGraph) { + return new NoLanguageDeephavenSession(updateGraph, "non-script-session"); } @Provides @@ -167,8 +167,8 @@ TestAuthorizationProvider provideTestAuthorizationProvider() { @Provides @Singleton - static UpdateGraphProcessor provideUpdateGraphProcessor() { - return UpdateGraphProcessor.DEFAULT; + static UpdateGraph provideUpdateGraph() { + return ExecutionContext.getContext().getUpdateGraph(); } } @@ -609,8 +609,8 @@ public void testFlightInfo() { final String tickingTableName = "flightInfoTestTicking"; final Table table = TableTools.emptyTable(10).update("I = i"); - final Table tickingTable = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.timeTable(1_000_000).update("I = i")); + final Table tickingTable = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> TableTools.timeTable(1_000_000).update("I = i")); // stuff table into the scope scriptSession.setVariable(staticTableName, table); @@ -640,8 +640,8 @@ public void testGetSchema() { final String tickingTableName = "flightInfoTestTicking"; final Table table = TableTools.emptyTable(10).update("I = i"); - final Table tickingTable = UpdateGraphProcessor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.timeTable(1_000_000).update("I = i")); + final Table tickingTable = ExecutionContext.getContext().getUpdateGraph().sharedLock().computeLocked( + () -> TableTools.timeTable(1_000_000).update("I = i")); try (final SafeCloseable ignored = LivenessScopeStack.open(scriptSession, false)) { // stuff table into the scope diff --git a/sphinx/source/conf.py b/sphinx/source/conf.py index 5b9cc0d02b6..780d8fb815f 100644 --- a/sphinx/source/conf.py +++ b/sphinx/source/conf.py @@ -103,7 +103,10 @@ import jpy py_scope_jpy = 
jpy.get_type("io.deephaven.engine.util.PythonScopeJpyImpl").ofMainGlobals() -py_dh_session = jpy.get_type("io.deephaven.integrations.python.PythonDeephavenSession")(py_scope_jpy) +_JUpdateGraph = jpy.get_type("io.deephaven.engine.updategraph.impl.PeriodicUpdateGraph") +docs_update_graph = _JUpdateGraph.newBuilder("PYTHON_DOCS").build() +_JPythonScriptSession = jpy.get_type("io.deephaven.integrations.python.PythonDeephavenSession") +py_dh_session = _JPythonScriptSession(docs_update_graph, py_scope_jpy) py_dh_session.getExecutionContext().open() diff --git a/table-api/src/main/java/io/deephaven/api/util/ConcurrentMethod.java b/table-api/src/main/java/io/deephaven/api/util/ConcurrentMethod.java index e22e7a2131f..ea50fe4a2f3 100644 --- a/table-api/src/main/java/io/deephaven/api/util/ConcurrentMethod.java +++ b/table-api/src/main/java/io/deephaven/api/util/ConcurrentMethod.java @@ -6,8 +6,8 @@ import java.lang.annotation.*; /** - * Indicates that the annotated method should be executed concurrently with respect to the {@code UpdateGraphProcessor} - * (UGP). Concurrent execution will not acquire the UGP lock before invocation, and will be run concurrently with other + * Indicates that the annotated method should be executed concurrently with respect to the {@code UpdateGraph} (UGP). + * Concurrent execution will not acquire the UGP lock before invocation, and will be run concurrently with other * annotated methods whenever possible. */ @Documented