@@ -489,6 +489,88 @@ impl<K: DepKind> DepGraph<K> {
         }
     }
 
+    /// Create a node when we force-feed a value into the query cache.
+    /// This is used to remove cycles during type-checking const generic parameters.
+    ///
+    /// As usual in the query system, we consider that the current state of the calling query
+    /// depends only on the list of dependencies up to now. As a consequence, the value
+    /// that this query gives us can only depend on those dependencies too. Therefore,
+    /// it is sound to use the current dependency set for the created node.
+    ///
+    /// During replay, the order of the nodes is relevant in the dependency graph.
+    /// So the unchanged replay will mark the caller query before trying to mark this one.
+    /// If there is a change to report, the caller query will be re-executed before this one.
+    ///
+    /// FIXME: If the code is changed enough for this node to be marked before requiring the
+    /// caller's node, we suppose that those changes will be enough to mark this node red and
+    /// force a recomputation using the "normal" way.
+    pub fn with_feed_task<Ctxt: DepContext<DepKind = K>, A: Debug, R: Debug>(
+        &self,
+        node: DepNode<K>,
+        cx: Ctxt,
+        key: A,
+        result: &R,
+        hash_result: fn(&mut StableHashingContext<'_>, &R) -> Fingerprint,
+    ) -> DepNodeIndex {
+        if let Some(data) = self.data.as_ref() {
+            if let Some(dep_node_index) = self.dep_node_index_of_opt(&node) {
+                #[cfg(debug_assertions)]
+                {
+                    let hashing_timer = cx.profiler().incr_result_hashing();
+                    let current_fingerprint =
+                        cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result));
+                    hashing_timer.finish_with_query_invocation_id(dep_node_index.into());
+                    data.current.record_edge(dep_node_index, node, current_fingerprint);
+                }
+
+                return dep_node_index;
+            }
+
+            let mut edges = SmallVec::new();
+            K::read_deps(|task_deps| match task_deps {
+                TaskDepsRef::Allow(deps) => edges.extend(deps.lock().reads.iter().copied()),
+                TaskDepsRef::Ignore | TaskDepsRef::Forbid => {
+                    panic!("Cannot summarize when dependencies are not recorded.")
+                }
+            });
+
+            let hashing_timer = cx.profiler().incr_result_hashing();
+            let current_fingerprint =
+                cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result));
+
+            let print_status = cfg!(debug_assertions) && cx.sess().opts.unstable_opts.dep_tasks;
+
+            // Intern the new `DepNode` with the dependencies up-to-now.
+            let (dep_node_index, prev_and_color) = data.current.intern_node(
+                cx.profiler(),
+                &data.previous,
+                node,
+                edges,
+                Some(current_fingerprint),
+                print_status,
+            );
+
+            hashing_timer.finish_with_query_invocation_id(dep_node_index.into());
+
+            if let Some((prev_index, color)) = prev_and_color {
+                debug_assert!(
+                    data.colors.get(prev_index).is_none(),
+                    "DepGraph::with_feed_task() - Duplicate DepNodeColor insertion for {key:?}",
+                );
+
+                data.colors.insert(prev_index, color);
+            }
+
+            dep_node_index
+        } else {
+            // Incremental compilation is turned off. We just execute the task
+            // without tracking. We still provide a dep-node index that uniquely
+            // identifies the task so that we have a cheap way of referring to
+            // the query for self-profiling.
+            self.next_virtual_depnode_index()
+        }
+    }
+
     #[inline]
     pub fn dep_node_index_of(&self, dep_node: &DepNode<K>) -> DepNodeIndex {
         self.dep_node_index_of_opt(dep_node).unwrap()
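For context, the sketch below shows how a caller might hand an already-computed value to `with_feed_task` instead of executing the query. It assumes a `Ctxt: DepContext` handle (which exposes `dep_graph()`); the key and value types, the hashing function, and the final cache-store step are illustrative placeholders, not part of this change.

```rust
// Hypothetical caller sketch: `MyKey`, `MyValue` and `hash_my_value` are
// placeholders standing in for a concrete query's key/value types and its
// stable-hashing function.
fn feed_precomputed_value<K: DepKind, Ctxt: DepContext<DepKind = K>>(
    cx: Ctxt,
    dep_node: DepNode<K>,
    key: MyKey,
    result: &MyValue,
) -> DepNodeIndex {
    // Intern a node whose edges are the dependencies the calling query has
    // recorded so far; `hash_my_value` must fingerprint `result` exactly as
    // the query system would, so red/green marking stays consistent.
    let dep_node_index = cx.dep_graph().with_feed_task(dep_node, cx, key, result, hash_my_value);

    // The caller is then expected to store `result` in the query cache under
    // this index, so later reads of the fed value record an edge to the node.
    dep_node_index
}
```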