Skip to content

Commit ed0c4cb

Browse files
EddyLXJ authored and meta-codesync[bot] committed
Adding python api to support sync trigger evict (#4984)
Summary: Pull Request resolved: #4984 X-link: https://github.com/facebookresearch/FBGEMM/pull/1997 As title, `has_running_evict` and `trigger_feature_evict` are needed to support sync trigger eviction Reviewed By: kathyxuyy Differential Revision: D83896308 fbshipit-source-id: 2c68a691ff66ca68c225528cdc7a8c7d50aab516
1 parent 9a05d32 commit ed0c4cb

File tree

6 files changed

+44
-10
lines changed

6 files changed

+44
-10
lines changed

fbgemm_gpu/src/dram_kv_embedding_cache/dram_kv_embedding_cache.h

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1175,17 +1175,8 @@ class DramKVEmbeddingCache : public kv_db::EmbeddingKVDB {
11751175

11761176
void compact() override {}
11771177

1178-
void trigger_feature_evict(
1179-
std::optional<uint32_t> inplace_update_ts = std::nullopt) {
1178+
void trigger_feature_evict() {
11801179
if (feature_evict_) {
1181-
if (inplace_update_ts.has_value() &&
1182-
feature_evict_config_.value()->trigger_strategy_ ==
1183-
EvictTriggerStrategy::BY_TIMESTAMP_THRESHOLD) {
1184-
auto* tt_evict = dynamic_cast<TimeThresholdBasedEvict<weight_type>*>(
1185-
feature_evict_.get());
1186-
CHECK(tt_evict != nullptr);
1187-
tt_evict->set_eviction_timestamp_threshold(inplace_update_ts.value());
1188-
}
11891180
feature_evict_->trigger_evict();
11901181
}
11911182
}
@@ -1269,6 +1260,13 @@ class DramKVEmbeddingCache : public kv_db::EmbeddingKVDB {
12691260
}
12701261
}
12711262

1263+
bool is_evicting() override {
1264+
if (feature_evict_) {
1265+
return feature_evict_->is_evicting();
1266+
}
1267+
return false;
1268+
}
1269+
12721270
// for inference only, this logs the total hit/miss count
12731271
// this should be called at the end of full/delta snapshot chunk by chunk
12741272
// update

fbgemm_gpu/src/dram_kv_embedding_cache/dram_kv_embedding_cache_wrapper.h

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,14 @@ class DramKVEmbeddingCacheWrapper : public torch::jit::CustomClassHolder {
179179
impl_->set_backend_return_whole_row(backend_return_whole_row);
180180
}
181181

182+
void trigger_feature_evict() {
183+
impl_->trigger_feature_evict();
184+
}
185+
186+
bool is_evicting() {
187+
return impl_->is_evicting();
188+
}
189+
182190
void set_feature_score_metadata_cuda(
183191
at::Tensor indices,
184192
at::Tensor count,

fbgemm_gpu/src/ssd_split_embeddings_cache/embedding_rocksdb_wrapper.h

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -236,6 +236,14 @@ class EmbeddingRocksDBWrapper : public torch::jit::CustomClassHolder {
236236
impl_->set_backend_return_whole_row(backend_return_whole_row);
237237
}
238238

239+
void trigger_feature_evict() {
240+
impl_->trigger_feature_evict();
241+
}
242+
243+
bool is_evicting() {
244+
return impl_->is_evicting();
245+
}
246+
239247
private:
240248
friend class KVTensorWrapper;
241249

fbgemm_gpu/src/ssd_split_embeddings_cache/kv_db_table_batched_embeddings.cpp

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -383,6 +383,14 @@ void EmbeddingKVDB::set_backend_return_whole_row(
383383
return;
384384
}
385385

386+
void EmbeddingKVDB::trigger_feature_evict() {
387+
return;
388+
}
389+
390+
bool EmbeddingKVDB::is_evicting() {
391+
return false;
392+
}
393+
386394
void EmbeddingKVDB::set(
387395
const at::Tensor& indices,
388396
const at::Tensor& weights,

fbgemm_gpu/src/ssd_split_embeddings_cache/kv_db_table_batched_embeddings.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -301,6 +301,10 @@ class EmbeddingKVDB : public std::enable_shared_from_this<EmbeddingKVDB> {
301301
FBEXCEPTION("Not implemented");
302302
}
303303

304+
virtual void trigger_feature_evict();
305+
306+
virtual bool is_evicting();
307+
304308
/**
305309
* @brief need to support set backend_return_whole_row from frontend
306310
* if one model changed from SSD to DRAM, or vice versa we need to

fbgemm_gpu/src/ssd_split_embeddings_cache/ssd_split_table_batched_embeddings.cpp

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -880,6 +880,10 @@ static auto embedding_rocks_db_wrapper =
880880
{
881881
torch::arg("backend_return_whole_row"),
882882
})
883+
.def(
884+
"trigger_feature_evict",
885+
&EmbeddingRocksDBWrapper::trigger_feature_evict)
886+
.def("is_evicting", &EmbeddingRocksDBWrapper::is_evicting)
883887
.def("stream_sync_cuda", &EmbeddingRocksDBWrapper::stream_sync_cuda)
884888
.def("get_cuda", &EmbeddingRocksDBWrapper::get_cuda)
885889
.def("compact", &EmbeddingRocksDBWrapper::compact)
@@ -980,6 +984,10 @@ static auto dram_kv_embedding_cache_wrapper =
980984
{
981985
torch::arg("backend_return_whole_row"),
982986
})
987+
.def(
988+
"trigger_feature_evict",
989+
&DramKVEmbeddingCacheWrapper::trigger_feature_evict)
990+
.def("is_evicting", &DramKVEmbeddingCacheWrapper::is_evicting)
983991
.def("set", &DramKVEmbeddingCacheWrapper::set)
984992
.def(
985993
"set_range_to_storage",

0 commit comments

Comments (0)