... | ... | @@ -63,11 +63,11 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td rowspan="2">2019-10-31</td>
|
|
|
<td>Étienne</td>
|
|
|
<td><strong>Matching the Blanks: Distributional Similarity for Relation Learning</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1906.03158)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1906.03158)</td>
|
|
|
</tr><tr>
|
|
|
<td>Valentin</td>
|
|
|
<td><strong>[Answering Complex Open-domain Questions Through Iterative Query Generation](uploads/9c2a244a3f898a42ed51a2ed226b4578/Langage_31_10.pdf)</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1910.07000)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1910.07000)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td>2019-10-24</td>
|
... | ... | @@ -79,7 +79,7 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td>2019-10-10</td>
|
|
|
<td>Adrien</td>
|
|
|
<td><strong>Adversarial Learning of Task-Oriented Neural Dialog Models</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1805.11762.pdf)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1805.11762.pdf)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td>2019-07-12</td>
|
... | ... | @@ -135,7 +135,7 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td rowspan="2">2019-06-13</td>
|
|
|
<td>Adrien</td>
|
|
|
<td><strong>Document Expansion by Query Prediction</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1904.08375)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1904.08375)</td>
|
|
|
</tr><tr>
|
|
|
<td>Edouard</td>
|
|
|
<td><strong>State of the Art LSTM Language Modeling</strong></td>
|
... | ... | @@ -145,40 +145,40 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td rowspan="4">2019-06-06</td>
|
|
|
<td>Adrien</td>
|
|
|
<td><strong>From Neural Re-Ranking to Neural Ranking: Learning a Sparse Representation for Inverted Indexing</strong></td>
|
|
|
<td>[papier](https://people.cs.umass.edu/~elm/papers/zamani.pdf)</td>
|
|
|
<td>[paper](https://people.cs.umass.edu/~elm/papers/zamani.pdf)</td>
|
|
|
</tr><tr>
|
|
|
<td>Jean-Yves</td>
|
|
|
<td><strong>Learning Embeddings into Entropic Wasserstein Spaces</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1905.03329)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1905.03329)</td>
|
|
|
</tr><tr>
|
|
|
<td rowspan="2">Bruno</td>
|
|
|
<td><strong>Fair is Better than Sensational: Man is to Doctor as Woman is to Doctor</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1905.09866.pdf)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1905.09866.pdf)</td>
|
|
|
</tr><tr>
|
|
|
<td><strong>Knowledge-Augmented Language Model and Its Application to Unsupervised Named-Entity Recognition</strong></td>
|
|
|
<td>[papier](https://www.aclweb.org/anthology/N19-1117)</td>
|
|
|
<td>[paper](https://www.aclweb.org/anthology/N19-1117)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td rowspan="2">2019-04-18</td>
|
|
|
<td>Étienne</td>
|
|
|
<td><strong>Non-monotonic Sequential Text Generation</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1902.02192)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1902.02192)</td>
|
|
|
</tr><tr>
|
|
|
<td>Valentin</td>
|
|
|
<td><strong>Attention is not Explanation</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1902.10186)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1902.10186)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td>2019-04-11</td>
|
|
|
<td>Adrien</td>
|
|
|
<td><strong>« Hé Manu, tu descends ? » : identification nommée du locuteur dans les dialogues</strong></td>
|
|
|
<td>[papier](http://www.asso-aria.org/coria/2019/CORIA_2019_paper_14.pdf)</td>
|
|
|
<td>[paper](http://www.asso-aria.org/coria/2019/CORIA_2019_paper_14.pdf)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td>2019-03-28</td>
|
|
|
<td>Valentin</td>
|
|
|
<td><strong>[Meta-Learning for Low-Resource Neural Machine Translation](uploads/bbca209c4411f5f129188e17036d9357/val.pdf)</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1808.08437)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1808.08437)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td>2019-03-21</td>
|
... | ... | @@ -190,21 +190,21 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td rowspan="2">2019-03-14</td>
|
|
|
<td>Charles</td>
|
|
|
<td><strong>[Multiple Attribute Text Rewriting](uploads/fdde0cd837159a7b8694fa534f1d0bda/charles.pdf)</strong></td>
|
|
|
<td>[papier](https://openreview.net/pdf?id=H1g2NhC5KQ)</td>
|
|
|
<td>[paper](https://openreview.net/pdf?id=H1g2NhC5KQ)</td>
|
|
|
</tr><tr>
|
|
|
<td>Valentin</td>
|
|
|
<td><strong>[Multi-step Retriever-Reader Interaction for Scalable Open-domain Question Answering](uploads/239a55c7c1e286d2658fdccacad6a395/valentin.pdf)</strong></td>
|
|
|
<td>[papier](https://openreview.net/pdf?id=HkfPSh05K7)</td>
|
|
|
<td>[paper](https://openreview.net/pdf?id=HkfPSh05K7)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td rowspan="3">2019-03-07</td>
|
|
|
<td>Eloi</td>
|
|
|
<td><strong>Do Neural Network Cross-Modal Mappings Really Bridge Modalities?</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1805.07616)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1805.07616)</td>
|
|
|
</tr><tr>
|
|
|
<td>Adrien</td>
|
|
|
<td><strong>[From Natural Language to Keyword Queries](uploads/4c863d108840dcca02ffdb9598227007/adrien_p_prez.pdf)</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1809.01495)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1809.01495)</td>
|
|
|
</tr><tr>
|
|
|
<td>Étienne</td>
|
|
|
<td><strong>Word Embeddings and PMI Factorization</strong></td>
|
... | ... | @@ -220,7 +220,7 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td rowspan="3">2019-02-21</td>
|
|
|
<td>Valentin</td>
|
|
|
<td><strong>[Multi-style Generative Reading Comprehension](uploads/589427a9b6e8ff155f4b648c4db54aca/valentin.pdf)</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1901.02262)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1901.02262)</td>
|
|
|
</tr><tr>
|
|
|
<td>Edouard</td>
|
|
|
<td><strong>[Variational Autoencoding of Textual Data](uploads/e8ce27a64b440794be85bd93bc78cfe0/edouard.pdf)</strong></td>
|
... | ... | @@ -228,7 +228,7 @@ Thursday at 13:00 in 26-00/536 |
|
|
</tr><tr>
|
|
|
<td>Jean-Yves</td>
|
|
|
<td><strong>Universal Transformers</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1807.03819.pdf)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1807.03819.pdf)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td>2019-02-14</td>
|
... | ... | @@ -240,19 +240,19 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td rowspan="4">2019-02-07</td>
|
|
|
<td>Étienne</td>
|
|
|
<td><strong>[Compressing Word Embeddings via Deep Compositional Code Learning](uploads/6114633bf6b73f25f301c9665f73842e/0.pdf)</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1711.01068)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1711.01068)</td>
|
|
|
</tr><tr>
|
|
|
<td>Bruno</td>
|
|
|
<td><strong>A Joint Many-Task Model: Growing a Neural Network for Multiple NLP Tasks</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1611.01587)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1611.01587)</td>
|
|
|
</tr><tr>
|
|
|
<td>Patrick</td>
|
|
|
<td><strong>Word Translation without Parallel Data</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1710.04087)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1710.04087)</td>
|
|
|
</tr><tr>
|
|
|
<td>Clara</td>
|
|
|
<td><strong>TransNets: Learning to Transform for Recommendation</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1704.02298)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1704.02298)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td>2019-01-31</td>
|
... | ... | @@ -276,13 +276,13 @@ Thursday at 13:00 in 26-00/536 |
|
|
<td>2019-01-10</td>
|
|
|
<td>Clément</td>
|
|
|
<td><strong>[Pointer Networks](https://docs.google.com/presentation/d/11-_UoLiCqe5L_Ms4KTbsLkccRWftCs6O1HT0VLhvKi0/edit#slide=id.g49a9a33d1c_0_345)</strong></td>
|
|
|
<td>[papier](http://papers.nips.cc/paper/5866-pointer-networks.pdf)</td>
|
|
|
<td>[paper](http://papers.nips.cc/paper/5866-pointer-networks.pdf)</td>
|
|
|
</tr>
|
|
|
<tr>
|
|
|
<td rowspan="2">2018-12-20</td>
|
|
|
<td>Patrick</td>
|
|
|
<td><strong>Unsupervised machine translation using monolingual corpora only</strong></td>
|
|
|
<td>[papier](https://arxiv.org/pdf/1711.00043.pdf)</td>
|
|
|
<td>[paper](https://arxiv.org/pdf/1711.00043.pdf)</td>
|
|
|
</tr><tr>
|
|
|
<td>Étienne</td>
|
|
|
<td><strong>[Attention is all you need](uploads/9ec074f041166a253b48912fb7170f02/slides.pdf)</strong></td>
|
... | ... | |