1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -25,6 +25,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix Netty deprecation warnings in transport-netty4 module ([#20233](https://github.com/opensearch-project/OpenSearch/pull/20233))
- Fix snapshot restore when an index sort is present ([#20284](https://github.com/opensearch-project/OpenSearch/pull/20284))
- Fix SearchPhaseExecutionException to properly initCause ([#20320](https://github.com/opensearch-project/OpenSearch/pull/20320))
- Remove child level directory on refresh for CompositeIndexWriter ([#20326](https://github.com/opensearch-project/OpenSearch/pull/20326))

### Dependencies
- Bump `com.google.auth:google-auth-library-oauth2-http` from 1.38.0 to 1.41.0 ([#20183](https://github.com/opensearch-project/OpenSearch/pull/20183))
server/src/main/java/org/opensearch/index/engine/CompositeIndexWriter.java
@@ -19,6 +19,8 @@
import org.apache.lucene.index.Term;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.util.BytesRef;
import org.opensearch.OpenSearchException;
import org.opensearch.common.CheckedBiFunction;
@@ -39,6 +41,7 @@

import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -540,12 +543,28 @@ private void refreshDocumentsForParentDirectory(CriteriaBasedIndexWriterLookup o

if (!directoryToCombine.isEmpty()) {
accumulatingIndexWriter.addIndexes(directoryToCombine.toArray(new Directory[0]));
Path[] childDirectoryPath = directoryToCombine.stream()
.map(directory -> getLocalFSDirectory(directory).getDirectory())
.toArray(Path[]::new);
IOUtils.closeWhileHandlingException(directoryToCombine);
IOUtils.rm(childDirectoryPath);
}

deleteDummyTombstoneEntry();
}

private FSDirectory getLocalFSDirectory(Directory localDirectory) {
FSDirectory localFSDirectory;
if (localDirectory instanceof FSDirectory) {
localFSDirectory = (FSDirectory) localDirectory;
} else {
// In this case it should be a FilterDirectory wrapped over FSDirectory as per above validation.
localFSDirectory = (FSDirectory) (((FilterDirectory) localDirectory).getDelegate());
}
Comment on lines +558 to +563
Member

We should assert that it is always an FSDirectory, since we use the createFSDirectory method from the store.


return localFSDirectory;
}
Comment on lines +556 to +566
Contributor


⚠️ Potential issue | 🟠 Major

Add defensive checks to prevent ClassCastException.

The method assumes the directory is either FSDirectory or a single-level FilterDirectory wrapping an FSDirectory, but lacks validation. This could fail if:

  • The directory is null
  • The directory is neither FSDirectory nor FilterDirectory (line 560 cast fails)
  • FilterDirectory wraps something other than FSDirectory (line 560 cast fails)
  • Multiple layers of FilterDirectory exist (only one level is unwrapped)

The comment mentions "as per above validation" but no such validation exists.

🔎 Proposed defensive implementation
 private FSDirectory getLocalFSDirectory(Directory localDirectory) {
+    if (localDirectory == null) {
+        throw new IllegalArgumentException("localDirectory cannot be null");
+    }
+    
     FSDirectory localFSDirectory;
     if (localDirectory instanceof FSDirectory) {
         localFSDirectory = (FSDirectory) localDirectory;
-    } else {
-        // In this case it should be a FilterDirectory wrapped over FSDirectory as per above validation.
+    } else if (localDirectory instanceof FilterDirectory) {
+        // Unwrap FilterDirectory to get the delegate
+        Directory delegate = ((FilterDirectory) localDirectory).getDelegate();
+        // Recursively unwrap in case of multiple FilterDirectory layers
+        if (delegate instanceof FilterDirectory || delegate instanceof FSDirectory) {
+            localFSDirectory = getLocalFSDirectory(delegate);
+        } else {
+            throw new IllegalArgumentException(
+                "Expected FSDirectory but FilterDirectory wraps: " + delegate.getClass().getName()
+            );
+        }
+    } else {
+        throw new IllegalArgumentException(
+            "Expected FSDirectory or FilterDirectory but got: " + localDirectory.getClass().getName()
+        );
-        localFSDirectory = (FSDirectory) (((FilterDirectory) localDirectory).getDelegate());
     }
     
     return localFSDirectory;
 }
🤖 Prompt for AI Agents
In server/src/main/java/org/opensearch/index/engine/CompositeIndexWriter.java
around lines 554 to 564, the method unconditionally casts to FSDirectory and
assumes a single FilterDirectory wrapper; add defensive checks: validate
localDirectory is not null, then iteratively unwrap while localDirectory is an
instance of FilterDirectory to reach the delegate, and after unwrapping assert
the resulting object is an instance of FSDirectory; if it is not, throw an
IllegalArgumentException (or another appropriate runtime exception) with a clear
message including the actual class encountered so callers can diagnose the
mismatch.
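
For reference, a minimal sketch that folds both review suggestions into one method, assuming Lucene's FilterDirectory.unwrap is acceptable here (it unwraps nested FilterDirectory layers in a loop); this is an illustration, not the code merged in this PR:

// Hypothetical variant: unwrap every FilterDirectory layer in one call and
// assert the result is an FSDirectory, which the store's createFSDirectory
// should guarantee per the review comment above.
private FSDirectory getLocalFSDirectory(Directory localDirectory) {
    Directory unwrapped = FilterDirectory.unwrap(localDirectory);
    assert unwrapped instanceof FSDirectory : "expected FSDirectory but got " + unwrapped.getClass().getName();
    return (FSDirectory) unwrapped;
}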


private void deleteDummyTombstoneEntry() throws IOException {
Term uid = new Term(IdFieldMapper.NAME, DUMMY_TOMBSTONE_DOC_ID);
accumulatingIndexWriter.deleteDocuments(uid);
Expand Up @@ -29,6 +29,8 @@
import org.opensearch.index.mapper.ParsedDocument;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
@@ -187,8 +189,49 @@ public void testUnableToObtainLockOnActiveLookupWhenWriteLockDuringIndexing() th
writer.join();
}

public void testConcurrentIndexingDuringRefresh() throws IOException, InterruptedException {
public void testChildDirectoryDeletedPostRefresh() throws IOException, InterruptedException {
final EngineConfig engineConfig = config();
Path dataPath = engineConfig.getStore().shardPath().resolveIndex();
CompositeIndexWriter compositeIndexWriter = null;

try {
compositeIndexWriter = new CompositeIndexWriter(
engineConfig,
createWriter(),
newSoftDeletesPolicy(),
softDeletesField,
indexWriterFactory
);

Engine.Index operation = indexForDoc(createParsedDoc("id", null, DEFAULT_CRITERIA));
try (Releasable ignore1 = compositeIndexWriter.acquireLock(operation.uid().bytes())) {
compositeIndexWriter.addDocuments(operation.docs(), operation.uid());
}

operation = indexForDoc(createParsedDoc("id2", null, "testingNewCriteria"));
try (Releasable ignore1 = compositeIndexWriter.acquireLock(operation.uid().bytes())) {
compositeIndexWriter.addDocuments(operation.docs(), operation.uid());
}

compositeIndexWriter.beforeRefresh();
compositeIndexWriter.afterRefresh(true);

long directoryCount = Files.find(
dataPath,
1,
(path, attributes) -> attributes.isDirectory() == true && path.endsWith("extra0") == false
Member

[nit] Add a comment explaining why extra0 needs to be removed [ExtrasFS]

).count() - 1;
// Ensure no child directory is pending here.
assertEquals(0, directoryCount);
} finally {
if (compositeIndexWriter != null) {
IOUtils.closeWhileHandlingException(compositeIndexWriter);
}
}

}
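
Regarding the ExtrasFS nit above: Lucene's test framework may wrap the test filesystem in ExtrasFS, which randomly injects "extra0" files and directories into paths, so the directory count has to ignore them. A hypothetical restatement of the check with the requested comment (the trailing - 1 accounts for dataPath itself, which the depth-1 Files.find includes):

// Lucene's ExtrasFS mock filesystem may inject "extra0" entries into the data
// path; exclude them so only real leftover child directories are counted.
long directoryCount = Files.find(
    dataPath,
    1,
    (path, attributes) -> attributes.isDirectory() && path.endsWith("extra0") == false
).count() - 1; // subtract the dataPath directory itself
assertEquals(0, directoryCount);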

public void testConcurrentIndexingDuringRefresh() throws IOException, InterruptedException {
CompositeIndexWriter compositeIndexWriter = new CompositeIndexWriter(
config(),
createWriter(),
Expand Down