# dl19-doc.yaml
---
name: msmarco-doc # Note: this is the name of the collection; this way we can share the index with the msmarco regressions
index_command: target/appassembler/bin/IndexCollection
index_utils_command: target/appassembler/bin/IndexReaderUtils
search_command: target/appassembler/bin/SearchCollection
topic_root: src/main/resources/topics-and-qrels/
qrels_root: src/main/resources/topics-and-qrels/
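# Descriptive note (not part of the original file): the four *_command entries above
# are the Anserini tools that the regression harness (presumably
# src/main/python/run_regression.py in this repo) shells out to, while topic_root and
# qrels_root are the directories against which the topic and qrel filenames under
# "topics:" below are resolved.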
index_root:
ranking_root:
collection: CleanTrecCollection
generator: DefaultLuceneDocumentGenerator
threads: 1
index_options:
  - -storePositions
  - -storeDocvectors
  - -storeRaw
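# Descriptive note (not part of the original file): these IndexCollection flags
# additionally store term positions, document vectors, and the raw document text in
# the index; the stored docvectors in particular are what the relevance-feedback runs
# below (RM3, axiomatic, BM25 PRF) typically need.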
topic_reader: TsvInt
evals:
  - command: eval/trec_eval.9.0.4/trec_eval
    params:
      - -m map
      - -c
    separator: "\t"
    parse_index: 2
    metric: map
    metric_precision: 4
    can_combine: true
  - command: eval/trec_eval.9.0.4/trec_eval
    params:
      - -m ndcg_cut.10
      - -c
    separator: "\t"
    parse_index: 2
    metric: NDCG@10
    metric_precision: 4
    can_combine: true
  - command: eval/trec_eval.9.0.4/trec_eval
    params:
      - -m recip_rank
      - -c
    separator: "\t"
    parse_index: 2
    metric: RR
    metric_precision: 4
    can_combine: true
  - command: eval/trec_eval.9.0.4/trec_eval
    params:
      - -m recall.100
      - -c
    separator: "\t"
    parse_index: 2
    metric: R@100
    metric_precision: 4
    can_combine: true
  - command: eval/trec_eval.9.0.4/trec_eval
    params:
      - -m recall.1000
      - -c
    separator: "\t"
    parse_index: 2
    metric: R@1000
    metric_precision: 4
    can_combine: true
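# Sketch (not in the original file) of the kind of trec_eval call each entry above
# describes, assuming a hypothetical run file run.dl19-doc.bm25-default.txt produced
# by SearchCollection:
#
#   eval/trec_eval.9.0.4/trec_eval -c -m map \
#     src/main/resources/topics-and-qrels/qrels.dl19-doc.txt \
#     run.dl19-doc.bm25-default.txt
#
# trec_eval prints tab-separated lines such as "map <tab> all <tab> 0.xxxx"; separator
# and parse_index tell the harness to split on tabs and read the score from column 2
# (0-indexed), presumably reported to metric_precision decimal places.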
input_roots:
  - /tuna1/ # on tuna
  - /store/ # on orca
  - /scratch2/ # on damiano
input: collections/msmarco/doc/
index_path: indexes/lucene-index.msmarco-doc.pos+docvectors+raw
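# Sketch (not in the original file) of the indexing invocation the fields above map
# onto; the harness prepends one of the input_roots to "input", so a local
# reproduction would look roughly like (the /path/to prefix is a placeholder):
#
#   target/appassembler/bin/IndexCollection \
#     -collection CleanTrecCollection \
#     -generator DefaultLuceneDocumentGenerator \
#     -threads 1 \
#     -input /path/to/collections/msmarco/doc/ \
#     -index indexes/lucene-index.msmarco-doc.pos+docvectors+raw \
#     -storePositions -storeDocvectors -storeRaw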
index_stats:
  documents: 3213835
  documents (non-empty): 3213835
  total terms: 2748636047
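# Descriptive note (assumption, not in the original file): the harness appears to use
# index_utils_command (IndexReaderUtils) to read back document and term counts from
# the freshly built index and compare them against the index_stats above.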
topics:
  - name: "[DL19 (Doc)](https://trec.nist.gov/data/deep2019.html)"
    path: topics.dl19-doc.txt
    qrel: qrels.dl19-doc.txt
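# Descriptive note (not part of the original file): path and qrel above are resolved
# against topic_root and qrels_root, and the topics file is parsed with the TsvInt
# topic reader declared earlier (tab-separated query id / query text pairs).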
models:
  - name: bm25-default
    display: BM25 (Default)
    params:
      - -bm25
    results:
      map:
        - 0.3309
      NDCG@10:
        - 0.5190
      RR:
        - 0.8046
      R@100:
        - 0.3948
      R@1000:
        - 0.6966
  - name: bm25-default+rm3
    display: +RM3
    params:
      - -bm25
      - -rm3
    results:
      map:
        - 0.3870
      NDCG@10:
        - 0.5169
      RR:
        - 0.7718
      R@100:
        - 0.4189
      R@1000:
        - 0.7504
  - name: bm25-default+ax
    display: +Ax
    params:
      - -bm25
      - -axiom
      - -axiom.deterministic
      - -rerankCutoff 20
    results:
      map:
        - 0.3516
      NDCG@10:
        - 0.4730
      RR:
        - 0.7428
      R@100:
        - 0.3945
      R@1000:
        - 0.7323
  - name: bm25-default+prf
    display: +PRF
    params:
      - -bm25
      - -bm25prf
    results:
      map:
        - 0.3624
      NDCG@10:
        - 0.5105
      RR:
        - 0.7775
      R@100:
        - 0.4004
      R@1000:
        - 0.7357
  - name: bm25-tuned
    display: BM25 (Tuned)
    params:
      - -bm25
      - -bm25.k1 3.44
      - -bm25.b 0.87
    results:
      map:
        - 0.3138
      NDCG@10:
        - 0.5140
      RR:
        - 0.8872
      R@100:
        - 0.3862
      R@1000:
        - 0.6810
  - name: bm25-tuned+rm3
    display: +RM3
    params:
      - -bm25
      - -bm25.k1 3.44
      - -bm25.b 0.87
      - -rm3
    results:
      map:
        - 0.3697
      NDCG@10:
        - 0.5485
      RR:
        - 0.8074
      R@100:
        - 0.4193
      R@1000:
        - 0.7282
  - name: bm25-tuned+ax
    display: +Ax
    params:
      - -bm25
      - -bm25.k1 3.44
      - -bm25.b 0.87
      - -axiom
      - -axiom.deterministic
      - -rerankCutoff 20
    results:
      map:
        - 0.3860
      NDCG@10:
        - 0.5245
      RR:
        - 0.7492
      R@100:
        - 0.4399
      R@1000:
        - 0.7545
  - name: bm25-tuned+prf
    display: +PRF
    params:
      - -bm25
      - -bm25.k1 3.44
      - -bm25.b 0.87
      - -bm25prf
    results:
      map:
        - 0.3858
      NDCG@10:
        - 0.5280
      RR:
        - 0.8007
      R@100:
        - 0.4287
      R@1000:
        - 0.7553
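# End-to-end sketch (not in the original file) of what one model entry amounts to,
# using bm25-default and a hypothetical output filename; the other models differ only
# in the extra params they append to SearchCollection. The resulting run file is then
# scored with the trec_eval commands described under "evals:" above.
#
#   target/appassembler/bin/SearchCollection \
#     -index indexes/lucene-index.msmarco-doc.pos+docvectors+raw \
#     -topicreader TsvInt \
#     -topics src/main/resources/topics-and-qrels/topics.dl19-doc.txt \
#     -output run.dl19-doc.bm25-default.txt \
#     -bm25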