---
dataset_info:
  features:
  - name: query
    dtype: string
  - name: positive
    dtype: string
  - name: negative
    dtype: string
  splits:
  - name: train
    num_bytes: 2766980301
    num_examples: 1391986
  download_size: 1589194354
  dataset_size: 2766980301
configs:
- config_name: default
  data_files:
  - split: train
    path: data/train-*
---

Training data for the paper ["Pooling And Attention: What Are Effective Designs For LLM-Based Embedding Models?"](https://arxiv.org/abs/2409.02727).
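
The dataset consists of (query, positive, negative) text triplets, the format commonly used for contrastive training of embedding models. Below is a minimal sketch of loading the train split with the Hugging Face `datasets` library; `"username/dataset-name"` is a placeholder and should be replaced with this dataset's actual Hub repository id.

```python
# Minimal sketch: load the train split and inspect one triplet.
# Assumption: "username/dataset-name" is a placeholder for the real Hub repo id.
from datasets import load_dataset

dataset = load_dataset("username/dataset-name", split="train")

# Each example is a (query, positive, negative) triplet of strings,
# matching the feature schema declared in the card metadata above.
example = dataset[0]
print(example["query"])
print(example["positive"])
print(example["negative"])
```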

Citation:
```
@misc{poolingattentioneffectivedesigns,
      title={Pooling And Attention: What Are Effective Designs For LLM-Based Embedding Models?},
      author={Yixuan Tang and Yi Yang},
      year={2024},
      eprint={2409.02727},
      archivePrefix={arXiv},
      primaryClass={cs.CL},
      url={https://arxiv.org/abs/2409.02727},
}
```