
Commit d452488
neurips
1 parent c0c4fcc

2 files changed: 62 additions, 4 deletions


_bibliography/references.bib
11 additions, 1 deletion
@@ -10,6 +10,7 @@ @inproceedings{
 url={https://openreview.net/forum?id=EMkrwJY2de},
 pdf={https://openreview.net/pdf?id=EMkrwJY2de},
 abstract={Message Passing Graph Neural Networks are known to suffer from two problems that are sometimes believed to be diametrically opposed: over-squashing and over-smoothing. The former results from topological bottlenecks that hamper the information flow from distant nodes and are mitigated by spectral gap maximization, primarily, by means of edge additions. However, such additions often promote over-smoothing that renders nodes of different classes less distinguishable. Inspired by the Braess phenomenon, we argue that deleting edges can address over-squashing and over-smoothing simultaneously. This insight explains how edge deletions can improve generalization, thus connecting spectral gap optimization to a seemingly disconnected objective of reducing computational resources by pruning graphs for lottery tickets. To this end, we propose a computationally effective spectral gap optimization framework to add or delete edges and demonstrate its effectiveness on the long range graph benchmark and on larger heterophilous datasets.},
+code={https://github.com/RelationalML/SpectralPruningBraess}
 }
 
 @inproceedings{
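The abstract quoted in the hunk above hinges on the spectral gap (algebraic connectivity) of the graph and on the Braess-style observation that deleting an edge can sometimes increase it. As a loose illustration of that quantity only, and not of the framework in the linked SpectralPruningBraess repository, a few lines of networkx and numpy can score single edge deletions by the spectral gap they leave behind; the greedy one-step search and the toy barbell graph are assumptions made purely for this example.

# Illustrative only: score candidate edge deletions by their effect on the
# spectral gap of the graph Laplacian. The actual framework lives in the
# linked SpectralPruningBraess repository.
import networkx as nx
import numpy as np

def spectral_gap(G):
    # Second-smallest eigenvalue of the combinatorial graph Laplacian.
    L = nx.laplacian_matrix(G).toarray().astype(float)
    return np.linalg.eigvalsh(L)[1]

def best_edge_to_delete(G):
    # Greedy single step: try deleting each edge and keep the deletion that
    # yields the largest spectral gap while keeping the graph connected
    # (a Braess-style beneficial deletion).
    best_edge, best_gap = None, -np.inf
    for u, v in list(G.edges()):
        H = G.copy()
        H.remove_edge(u, v)
        if not nx.is_connected(H):
            continue
        gap = spectral_gap(H)
        if gap > best_gap:
            best_edge, best_gap = (u, v), gap
    return best_edge, best_gap

G = nx.barbell_graph(5, 1)  # toy graph with an obvious bottleneck
print(spectral_gap(G), best_edge_to_delete(G))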
@@ -23,7 +24,16 @@ @inproceedings{
 abstract={Graph neural networks exhibiting a rescale invariance, like GATs, obey a conservation law of its parameters, which has been exploited to derive a balanced state that induces good initial trainability. Yet, finite learning rates as used in practice topple the network out of balance during training. This effect is even more pronounced with larger learning rates that tend to induce improved generalization but make the training dynamics less robust. To support even larger learning rates, we propose to dynamically balance the network according to a different criterion, based on relative gradients, that promotes faster and better. In combination with large learning rates and gradient clipping, dynamic rebalancing significantly improves generalization on real-world data. We observe that rescaling provides us with the flexibility to control the order in which network layers are trained. This leads to novel insights into similar phenomena as grokking, which can further boost generalization performance.}
 }
 
-@article{Hossain2024,
+@inproceedings{
+hossain2024pruning,
+title={Pruning neural network models for gene regulatory dynamics using data and domain knowledge},
+author = {Hossain, Intekhab and Fischer, Jonas and Burkholz, Rebekka and Quackenbush, John},
+booktitle={Thirty-eighth Conference on Neural Information Processing Systems},
+year={2024},
+abstract={The practical utility of machine learning models in the sciences often hinges on their interpretability. It is common to assess a model's merit for scientific discovery, and thus novel insights, by how well it aligns with already available domain knowledge - a dimension that is currently largely disregarded in the comparison of neural network models. While pruning can simplify deep neural network architectures and excels in identifying sparse models, as we show in the context of gene regulatory network inference, state-of-the-art techniques struggle with biologically meaningful structure learning. To address this issue, we propose DASH, a generalizable framework that guides network pruning by using domain-specific structural information in model fitting and leads to sparser, better interpretable models that are more robust to noise. Using both synthetic data with ground truth information, as well as real-world gene expression data, we show that DASH, using knowledge about gene interaction partners within the putative regulatory network, outperforms general pruning methods by a large margin and yields deeper insights into the biological systems being studied.}
+}
+
+@article{hossain2024biologically,
 author = {Hossain, Intekhab and Fanfani, Viola and Fischer, Jonas and Quackenbush, John and Burkholz, Rebekka},
 title={Biologically informed NeuralODEs for genome-wide regulatory dynamics},
 journal={Genome Biology},
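The context lines at the top of the hunk above quote an abstract about dynamically rebalancing rescale-invariant networks according to relative gradients. One plausible, heavily simplified reading of that idea, written for a two-layer ReLU network purely as an illustration (the paper's actual criterion and procedure may differ), is to pick a rescaling factor that equalizes the relative gradient norms of the two layers while leaving the network function unchanged.

# Highly simplified illustration of rebalancing a rescale-invariant network so
# that relative gradients match across layers. One plausible reading of the
# abstract above, not the paper's actual procedure.
import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(32, 10))
y = rng.normal(size=(32, 1))
W1 = rng.normal(scale=1.0, size=(10, 20))
W2 = rng.normal(scale=0.05, size=(20, 1))   # deliberately out of balance

def forward_backward(W1, W2):
    H = np.maximum(X @ W1, 0.0)              # ReLU hidden layer
    out = H @ W2
    d_out = 2 * (out - y) / len(X)           # gradient of mean squared error
    g2 = H.T @ d_out
    g1 = X.T @ ((d_out @ W2.T) * (H > 0))
    return out, g1, g2

out, g1, g2 = forward_backward(W1, W2)
r1 = np.linalg.norm(g1) / np.linalg.norm(W1)  # relative gradient, layer 1
r2 = np.linalg.norm(g2) / np.linalg.norm(W2)  # relative gradient, layer 2

# Rescaling W1 -> lam*W1, W2 -> W2/lam leaves the function unchanged (ReLU is
# positively homogeneous) and multiplies r1 by 1/lam^2 and r2 by lam^2, so
# lam = (r1/r2)**0.25 equalizes the two relative gradients.
lam = (r1 / r2) ** 0.25
out_b, g1_b, g2_b = forward_backward(lam * W1, W2 / lam)

print(np.allclose(out, out_b))                # function is unchanged
print(np.linalg.norm(g1_b) / np.linalg.norm(lam * W1),
      np.linalg.norm(g2_b) / np.linalg.norm(W2 / lam))  # now equal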

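The entry this commit adds, hossain2024pruning, describes DASH, which guides pruning with domain knowledge about putative gene interactions. As a rough sketch of the general idea of prior-guided pruning, not of DASH itself, one could blend weight magnitudes with a prior interaction matrix when choosing which connections to keep; the blending rule, the alpha parameter, and the random stand-in data below are assumptions made for illustration only.

# Illustrative sketch of prior-guided magnitude pruning in the spirit of the
# DASH abstract above. The score blends learned weight magnitudes with a
# domain prior (e.g. putative gene-gene interactions); the blend and "alpha"
# are assumptions for illustration, not the paper's rule.
import numpy as np

def prior_guided_mask(W, prior, alpha=0.5, sparsity=0.9):
    # Keep the (1 - sparsity) fraction of entries with the highest blended score.
    #   W      : learned weight matrix (genes x genes)
    #   prior  : nonnegative matrix of prior evidence for each interaction
    #   alpha  : trade-off between data (|W|) and domain knowledge (prior)
    score = alpha * np.abs(W) / (np.abs(W).max() + 1e-12) \
          + (1 - alpha) * prior / (prior.max() + 1e-12)
    k = int((1 - sparsity) * W.size)
    threshold = np.sort(score, axis=None)[-k]
    return (score >= threshold).astype(W.dtype)

rng = np.random.default_rng(0)
W = rng.normal(size=(50, 50))                    # stand-in for fitted model weights
prior = (rng.random((50, 50)) < 0.05).astype(float)  # stand-in for known interactions
mask = prior_guided_mask(W, prior)
print(mask.mean())                               # fraction of weights kept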
_site/publications/index.html
51 additions, 3 deletions
@@ -114,6 +114,7 @@ <h1 id="publications">Publications</h1>
 <div class="links"><a class="conf btn btn-sm z-depth-0">NeurIPS</a><a class="bibtex btn btn-sm z-depth-0" role="button">Bib</a>
 <a class="abstract btn btn-sm z-depth-0" role="button">Abs</a>
 <a href="https://openreview.net/pdf?id=EMkrwJY2de" class="btn btn-sm z-depth-0" role="button">PDF</a>
+<a href="https://github.com/RelationalML/SpectralPruningBraess" class="btn btn-sm z-depth-0" role="button">Code</a>
 </div>
@@ -184,10 +185,57 @@ <h1 id="publications">Publications</h1>
 <li><!-- _layouts/bib.html -->
 
 
-<div class="row" style="margin-top: 30px;" id="bib-Hossain2024" authors="HossainFanfaniFischerQuackenbushBurkholz">
+<div class="row" style="margin-top: 30px;" id="bib-hossain2024pruning" authors="HossainFischerBurkholzQuackenbush">
 <div class="preview"><img class="preview z-depth-1 rounded" src="/images/publications/default.png" /></div>
 <!-- Entry bib key -->
-<div id="Hossain2024" class="col-sm-10">
+<div id="hossain2024pruning" class="col-sm-10">
+<!-- Title -->
+
+<div class="title"><b>Pruning neural network models for gene regulatory dynamics using data and domain knowledge</b></div>
+
+<!-- Author -->
+<div class="author">
+
+
+Intekhab Hossain,&nbsp;Jonas Fischer,&nbsp;Rebekka Burkholz,&nbsp;and&nbsp;John Quackenbush</div>
+
+<!-- Journal/Book title and date -->
+
+
+<div class="periodical">
+<em>In Thirty-eighth Conference on Neural Information Processing Systems</em>, 2024
+</div>
+<div class="periodical">
+
+</div>
+
+<!-- Links/Buttons -->
+<div class="links"><a class="conf btn btn-sm z-depth-0">NeurIPS</a><a class="bibtex btn btn-sm z-depth-0" role="button">Bib</a>
+<a class="abstract btn btn-sm z-depth-0" role="button">Abs</a>
+</div>
+
+
+<!-- Hidden abstract block -->
+<div class="abstract hidden">
+<p>The practical utility of machine learning models in the sciences often hinges on their interpretability. It is common to assess a model’s merit for scientific discovery, and thus novel insights, by how well it aligns with already available domain knowledge - a dimension that is currently largely disregarded in the comparison of neural network models. While pruning can simplify deep neural network architectures and excels in identifying sparse models, as we show in the context of gene regulatory network inference, state-of-the-art techniques struggle with biologically meaningful structure learning. To address this issue, we propose DASH, a generalizable framework that guides network pruning by using domain-specific structural information in model fitting and leads to sparser, better interpretable models that are more robust to noise. Using both synthetic data with ground truth information, as well as real-world gene expression data, we show that DASH, using knowledge about gene interaction partners within the putative regulatory network, outperforms general pruning methods by a large margin and yields deeper insights into the biological systems being studied.</p>
+</div><!-- Hidden bibtex block -->
+<div class="bibtex hidden">
+<figure class="highlight"><pre><code class="language-bibtex" data-lang="bibtex"><span class="nc">@inproceedings</span><span class="p">{</span><span class="nl">hossain2024pruning</span><span class="p">,</span>
+<span class="na">title</span> <span class="p">=</span> <span class="s">{Pruning neural network models for gene regulatory dynamics using data and domain knowledge}</span><span class="p">,</span>
+<span class="na">author</span> <span class="p">=</span> <span class="s">{Hossain, Intekhab and Fischer, Jonas and Burkholz, Rebekka and Quackenbush, John}</span><span class="p">,</span>
+<span class="na">booktitle</span> <span class="p">=</span> <span class="s">{Thirty-eighth Conference on Neural Information Processing Systems}</span><span class="p">,</span>
+<span class="na">year</span> <span class="p">=</span> <span class="s">{2024}</span>
+<span class="p">}</span></code></pre></figure>
+</div>
+</div>
+</div></li>
+<li><!-- _layouts/bib.html -->
+
+
+<div class="row" style="margin-top: 30px;" id="bib-hossain2024biologically" authors="HossainFanfaniFischerQuackenbushBurkholz">
+<div class="preview"><img class="preview z-depth-1 rounded" src="/images/publications/default.png" /></div>
+<!-- Entry bib key -->
+<div id="hossain2024biologically" class="col-sm-10">
 <!-- Title -->
 
 <div class="title"><a href="https://doi.org/10.1186/s13059-024-03264-0"><b>Biologically informed NeuralODEs for genome-wide regulatory dynamics</b></a></div>
@@ -219,7 +267,7 @@ <h1 id="publications">Publications</h1>
 <p>Gene regulatory network (GRN) models that are formulated as ordinary differential equations (ODEs) can accurately explain temporal gene expression patterns and promise to yield new insights into important cellular processes, disease progression, and intervention design. Learning such gene regulatory ODEs is challenging, since we want to predict the evolution of gene expression in a way that accurately encodes the underlying GRN governing the dynamics and the nonlinear functional relationships between genes. Most widely used ODE estimation methods either impose too many parametric restrictions or are not guided by meaningful biological insights, both of which impede either scalability, explainability, or both.</p>
 </div><!-- Hidden bibtex block -->
 <div class="bibtex hidden">
-<figure class="highlight"><pre><code class="language-bibtex" data-lang="bibtex"><span class="nc">@article</span><span class="p">{</span><span class="nl">Hossain2024</span><span class="p">,</span>
+<figure class="highlight"><pre><code class="language-bibtex" data-lang="bibtex"><span class="nc">@article</span><span class="p">{</span><span class="nl">hossain2024biologically</span><span class="p">,</span>
 <span class="na">author</span> <span class="p">=</span> <span class="s">{Hossain, Intekhab and Fanfani, Viola and Fischer, Jonas and Quackenbush, John and Burkholz, Rebekka}</span><span class="p">,</span>
 <span class="na">title</span> <span class="p">=</span> <span class="s">{Biologically informed NeuralODEs for genome-wide regulatory dynamics}</span><span class="p">,</span>
 <span class="na">journal</span> <span class="p">=</span> <span class="s">{Genome Biology}</span><span class="p">,</span>
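The abstract rendered in the hunk above describes gene regulatory dynamics modeled as ODEs whose vector field should respect the underlying GRN. A toy numpy sketch of that setup, with an adjacency mask so that each gene's rate of change depends only on its putative regulators, is given below; the masking scheme, decay term, and Euler integration are illustrative assumptions rather than the NeuralODE architecture of the cited paper.

# Toy sketch only: a gene-expression ODE dx/dt = f(x) whose weights are masked
# by a putative GRN adjacency, so each gene's dynamics depend only on its
# putative regulators. Not the architecture of the cited paper.
import numpy as np

rng = np.random.default_rng(1)
n_genes = 6
A = (rng.random((n_genes, n_genes)) < 0.3).astype(float)  # A[i, j] = 1 if gene j may regulate gene i
W = rng.normal(scale=0.5, size=(n_genes, n_genes)) * A     # learnable weights, masked by the prior GRN
decay = 0.3

def dxdt(x):
    # Regulation through the masked weights, plus first-order decay of each gene.
    return np.tanh(W @ x) - decay * x

# Forward-Euler integration of an expression trajectory from a random initial state.
x = rng.random(n_genes)
dt, steps = 0.05, 200
trajectory = [x.copy()]
for _ in range(steps):
    x = x + dt * dxdt(x)
    trajectory.append(x.copy())

print(np.round(trajectory[-1], 3))  # near-steady expression levels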
