-
Notifications
You must be signed in to change notification settings - Fork 31
/
Copy pathindex.html
472 lines (299 loc) · 17.6 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<!-- rel="icon" is the standard form; "shortcut icon" is a legacy IE-ism -->
<link rel="icon" href="/assets/img/mediaeval-favicon.png" type="image/png">
<!--Import Google Icon Font-->
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<!--Import materialize.css (type="text/css" is the default and omitted) -->
<link rel="stylesheet" href="/assets/css/materialize.min.css" media="screen,projection">
<link rel="stylesheet" href="/assets/css/main.css" media="screen,projection">
<!-- Add icon library -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
<!--Let browser know website is optimized for mobile-->
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<!-- NOTE(review): removed the manual twitter:card/twitter:image tags that duplicated and
     contradicted the Jekyll SEO block below (summary vs summary_large_image, different
     images); crawlers pick one unpredictably when both are present. The image alt text
     is kept because the SEO block does not emit one. -->
<meta name="twitter:image:alt" content="Picture of group of people participants of the 2019 MediaEval workshop">
<!-- Global site tag (gtag.js) - Google Analytics -->
<!-- Begin Jekyll SEO tag v2.8.0 -->
<title>MediaEval Benchmark | MediaEval Benchmarking Initiative Multimedia Evaluation</title>
<meta name="generator" content="Jekyll v3.9.2">
<meta property="og:title" content="MediaEval Benchmark">
<meta property="og:locale" content="en_US">
<meta name="description" content="MediaEval Benchmarking Initiative Multimedia Evaluation">
<meta property="og:description" content="MediaEval Benchmarking Initiative Multimedia Evaluation">
<link rel="canonical" href="https://multimediaeval.github.io/">
<meta property="og:url" content="https://multimediaeval.github.io/">
<meta property="og:site_name" content="MediaEval Benchmark">
<meta property="og:image" content="https://multimediaeval.github.io/assets/img/twitter-card.png">
<meta property="og:type" content="website">
<link rel="next" href="https://multimediaeval.github.io/blog/page2">
<meta name="twitter:card" content="summary_large_image">
<!-- Twitter card tags use name="", not property="" (that is Open Graph syntax) -->
<meta name="twitter:image" content="https://multimediaeval.github.io/assets/img/twitter-card.png">
<meta name="twitter:title" content="MediaEval Benchmark">
<script type="application/ld+json">
{"@context":"https://schema.org","@type":"WebSite","description":"MediaEval Benchmarking Initiative Multimedia Evaluation","headline":"MediaEval Benchmark","image":"https://multimediaeval.github.io/assets/img/twitter-card.png","name":"MediaEval Benchmark","url":"https://multimediaeval.github.io/"}</script>
<!-- End Jekyll SEO tag -->
</head>
<body>
<div class="navbar-fixed">
<!-- Primary site navigation (Materialize fixed navbar) -->
<nav aria-label="Primary">
<div class="nav-wrapper green darken-4">
<a href="/" class="brand-logo" style="padding-left: 20px">
<!-- NOTE(review): an <h3> inside the navbar is a styling choice, not a document
     heading; consider a styled <span> — verify the .northumbria CSS first -->
<h3 class="northumbria" style="margin-top: 20px;color:white;">Mediaeval</h3>
</a>
<!-- Small screen definition: icon-only trigger, so give it an accessible name -->
<a href="#" data-target="mobile-nav" class="sidenav-trigger" aria-label="Open navigation menu">
<i class="material-icons" style="color:white;">menu</i>
</a>
<!-- Big screen Structure -->
<ul class="materialize right hide-on-med-and-down">
<li><a href="/" aria-label="Home"><i class="material-icons" style="color:white;">home</i></a></li>
<li><a href="/editions/2025/" style="color:white;">MediaEval 2025</a></li>
<!-- href="#!" is the Materialize convention for dropdown triggers (href-less <a> is not focusable) -->
<li><a class="dropdown-trigger" data-target="editions" href="#!" style="color:white;">
MediaEval History<i class="material-icons right">arrow_drop_down</i>
</a></li>
<li><a href="/philosophy/" style="color:white;">MediaEval Philosophy</a></li>
<li><a href="/about/" style="color:white;">About MediaEval</a></li>
<li><a href="/bib/" style="color:white;">Bibliography</a></li>
</ul>
</div>
</nav>
</div>
<!-- Big screen dropdown Structure -->
<!-- Desktop dropdown body, bound via data-target="editions" on the trigger above -->
<ul class="dropdown-content materialize" id="editions">
<li><a href="/editions/2023/">MediaEval 2023</a></li>
<li><a href="/editions/2022/">MediaEval 2022</a></li>
<li><a href="/editions/2021/">MediaEval 2021</a></li>
<li><a href="/editions/2020/">MediaEval 2020</a></li>
<!-- rel="noopener noreferrer" guards the new-tab link against reverse tabnabbing -->
<li><a href="http://www.multimediaeval.org/" target="_blank" rel="noopener noreferrer">pre-2020</a></li>
</ul>
<!-- Mobile Structure -->
<!-- Mobile sidenav, opened by the .sidenav-trigger in the navbar -->
<ul class="materialize sidenav green darken-4" id="mobile-nav">
<li><a href="/" style="color:white;">Home</a></li>
<li><a href="/editions/2025/" style="color:white;">MediaEval 2025</a></li>
<!-- href="#!" is the Materialize convention for dropdown triggers (href-less <a> is not focusable) -->
<li><a class="dropdown-trigger" data-target="mobile-editions" href="#!" style="color:white;">
MediaEval History<i class="material-icons right" style="color:white;">arrow_drop_down</i>
</a></li>
<li><a href="/philosophy/" style="color:white;">MediaEval Philosophy</a></li>
<li><a href="/about/" style="color:white;">About MediaEval</a></li>
<li><a href="/bib/" style="color:white;">Bibliography</a></li>
</ul>
<!-- Mobile dropdown Structure -->
<!-- Mobile dropdown body, bound via data-target="mobile-editions" -->
<ul class="dropdown-content materialize" id="mobile-editions">
<li><a href="/editions/2023/">MediaEval 2023</a></li>
<li><a href="/editions/2022/">MediaEval 2022</a></li>
<li><a href="/editions/2021/">MediaEval 2021</a></li>
<li><a href="/editions/2020/">MediaEval 2020</a></li>
<!-- rel="noopener noreferrer" guards the new-tab link against reverse tabnabbing -->
<li><a href="http://www.multimediaeval.org/" target="_blank" rel="noopener noreferrer">pre-2020</a></li>
</ul>
<!-- Hero banner with the page's single <h1> -->
<header class="jumbotron">
<div class="content">
<h1 class="center white-text northumbria">Multimedia Evaluation Benchmark</h1>
</div>
</header>
<div class="container">
<div class="row">
<article>
<!-- Deprecated align="left" removed: left is the default alignment for LTR text;
     use CSS if explicit alignment is ever needed -->
<section class="post-content">
<div class="posts">
<!-- Blog post: "MediaEval 2025 Call for Task Proposals" (2024-09-24), shown in full on the home page -->
<div>
<!-- NOTE(review): this <span> wraps block-level content (<header>, <div>), which is
     invalid HTML; a <div> would be correct — verify no CSS/JS targets ".posts span"
     before changing (same pattern repeats for every post below) -->
<span>
<header>
<h4>
MediaEval 2025 Call for Task Proposals
</h4>
<div class="post-info">
<p class="meta">
September 24, 2024
</p>
</div>
</header>
<div>
<p>The Multimedia Evaluation Benchmark, MediaEval, offers challenges in artificial intelligence related to data that includes multiple modalities (e.g., audio, visual, textual, and/or contextual). The goal of MediaEval is to develop and evaluate new
algorithms and technologies for analyzing, exploring and accessing information in multimedia data. MediaEval pursues a “Quest for Insight”: we push beyond improving evaluation scores to achieving deeper understanding about the challenges, including
data and the strengths and weaknesses of particular types of approaches. Our larger aim is to promote reproducible research that makes multimedia a positive force for society. MediaEval is now calling for proposals for tasks to run in the 2025
benchmarking season.</p>
<ul>
<li>Call for Task proposals (first deadline): Wed. 11 December</li>
<li>Call for Task proposals (final deadline): Wed. 22 January</li>
</ul>
<p>The proposal should describe the motivation of the task, including a description of the use scenario in which the results of the tasks would be used (e.g., application that serves users). It should provide a definition of the specific problem that task
participants are required to solve. Also, it should include information on the data (including source and licensing), and on how the solutions developed by task participants will be evaluated (the metric and description of how the metric is related to the
use scenario). We ask you to think carefully about specific research questions that are related to the challenge, and mention these in the proposal. These research questions will guide participants in pursuing the “Quest for Insight”, i.e., going beyond
thinking only about evaluation scores. Finally, the proposal must also include a statement of how the task is related to MediaEval (i.e., the human or social component), and how it extends the state of the art.</p>
<h5 id="indication-of-intent">Indication of Intent</h5>
<p>If you plan to submit a task proposal, we strongly suggest that you submit, by email, an “Indication of Intent” in the form of a short task summary (a blurb of 50-100 words) as soon as possible. The description should include a clear statement of what
participants are expected to do, which data is used, and how participant submissions are evaluated. The summary should finish with a statement of the motivation for the task.</p>
<h5 id="full-task-proposal">Full Task Proposal</h5>
<p>A task proposal contains the following elements. Note that there is no specified length for the proposal, but in general proposals do not exceed three pages.</p>
<h6 id="part-i-task-description">Part I: Task Description</h6>
<p>This is a version of your task description that will be posted to the MediaEval website. Its goal is to inform and attract the interest of potential participants. It consists of the following parts:</p>
<ul>
<li>Task title: Give your task an informative title.</li>
<li>Task description: State the goal of the task and what is expected of task participants in a simple easy-to-understand manner. The task description should make clear what the task requires of participants.</li>
<li>Motivation and background: Describe the motivating use scenario, i.e., how would the results of the task be used in an application that serves users. Also, state how the task extends the state of the art.</li>
<li>Target group: Describe the type of researchers who would be interested in participating in the task.</li>
<li>Data: Describe the data, including how the data will be collected and licensed.</li>
<li>Evaluation methodology: Describe the evaluation methodology, including how the ground truth will be created and the evaluation metrics.</li>
<li>“Quest for Insight”: List several research questions related to the challenge, which the participants can strive to answer in order to go beyond just looking at evaluation metrics.</li>
<li>References and recommended reading: list 3-4 references related to the task that teams should have read before attempting the task.</li>
<li>List of task organizers. (Designate a lead task organizer whose contact details will appear on the website for the task.)</li>
</ul>
<h6 id="part-ii-big-picture-of-the-task">Part II: Big Picture of the Task</h6>
<p>Please address each of the following points with 2-3 sentences each:</p>
<p>Innovation: MediaEval strives to offer innovative tasks. New tasks open up new terrain for multimedia researchers, continuing tasks introduce novel aspects every year that drive forward the state of the art. Describe the novel contribution of your task.
Focus: MediaEval focuses on tasks that have a human or social aspect. This means that they serve groups of users, work with multimedia content produced by users, and/or address issues of affect and subjectivity. MediaEval strives to promote reproducible
research that makes multimedia a positive force for society. Please comment on the human or social aspect of your task. Risk management: What are the main risks that you foresee for the task, and how you plan to address them (i.e., what challenges will
you face in organizing the task and how do you expect to overcome them)?</p>
<h6 id="part-iii-task-organization-team">Part III: Task Organization Team</h6>
<p>Write a very brief paragraph outlining the relevant interests and experience of your organizing team. Your team should be large enough to handle the organization and management of the task. This includes evaluating participant runs, and carrying out
failure analysis on the results. Ideally teams should consist of members from multiple research sites and multiple projects. A mix of experienced and early-career researchers is preferred. MediaEval has a strong tradition of encouraging and supporting
early-stage researchers in gaining experience in organization of benchmark tasks. Note that your task team can add members after the proposal has been accepted.</p>
<h5 id="submission">Submission</h5>
<p>Please submit your proposal (as a text file, .doc, .docx or link to an editable Google doc file) by emailing it to Martha Larson m (dot) larson (at) cs (dot) ru (dot) nl with Steven Hicks steven (at) simula (dot) no and Mihai Gabriel (Gabi) Constantin
mihai.constantin84 (at) upb (dot) ro on cc.</p>
<h5 id="mediaeval-2025-schedule">MediaEval 2025 Schedule</h5>
<ul>
<li>Registration for task participation opens: April 2025</li>
<li>Development data release: May 2025</li>
<li>Test data release: June 2025</li>
<li>Runs due: Wed. 24 Sept 2025</li>
<li>Working notes papers due: Wed. 8 Oct 2025</li>
<li>MediaEval 2025 Workshop, Sat.-Sun. 25-26 October 2025, Dublin, Ireland and Online.</li>
</ul>
<p>The MediaEval 2025 workshop will be held during a weekend so that it can occur exactly between ACM Multimedia 2025 and CBMI 2025. The scheduling helps to reduce traveling for those who are traveling. The workshop will be officially co-located with CBMI 2025.</p>
</div>
</span>
</div>
<hr />
<!-- Blog post teaser: "MediaEval Workshop Registration and Information" (2024-01-13) -->
<div>
<span>
<header>
<h4>
MediaEval Workshop Registration and Information
</h4>
<div class="post-info">
<p class="meta">
January 13, 2024
</p>
</div>
</header>
<div>
<!-- Closed the previously unclosed <p>; aria-label gives the generic "read more"
     link a name that makes sense out of context for screen-reader users -->
<p>The 14th Annual MediaEval Workshop will take place Thursday-Friday 1-2...
<a href="/2024/01/13/workshop-registration.html" aria-label="Read more: MediaEval Workshop Registration and Information">read more</a></p>
</div>
</span>
</div>
<hr>
<!-- Blog post teaser: "MediaEval 2023 Registration" (2023-07-10) -->
<div>
<span>
<header>
<h4>
MediaEval 2023 Registration
</h4>
<div class="post-info">
<p class="meta">
July 10, 2023
</p>
</div>
</header>
<div>
<!-- Closed the previously unclosed <p>; aria-label gives the generic "read more"
     link a name that makes sense out of context for screen-reader users -->
<p>The Benchmarking Initiative for Multimedia Evaluation (MediaEval) offers challenges related...
<a href="/2023/07/10/workshop-registration.html" aria-label="Read more: MediaEval 2023 Registration">read more</a></p>
</div>
</span>
</div>
<hr>
<!-- Blog post teaser: "MediaEval 2023 Call for Task Proposals" (2023-01-20) -->
<div>
<span>
<header>
<h4>
MediaEval 2023 Call for Task Proposals
</h4>
<div class="post-info">
<p class="meta">
January 20, 2023
</p>
</div>
</header>
<div>
<!-- Closed the previously unclosed <p>; aria-label gives the generic "read more"
     link a name that makes sense out of context for screen-reader users -->
<p>The Multimedia Evaluation Benchmark, MediaEval, offers challenges in artificial intelligence...
<a href="/2023/01/20/call.html" aria-label="Read more: MediaEval 2023 Call for Task Proposals">read more</a></p>
</div>
</span>
</div>
<hr>
<!-- Blog post teaser: "MediaEval Workshop Information" (2023-01-06) -->
<div>
<span>
<header>
<h4>
MediaEval Workshop Information
</h4>
<div class="post-info">
<p class="meta">
January 6, 2023
</p>
</div>
</header>
<div>
<!-- Closed the previously unclosed <p>; aria-label gives the generic "read more"
     link a name that makes sense out of context for screen-reader users -->
<p>The MediaEval 2022 workshop will take place Thursday-Friday 12-13 January...
<a href="/2023/01/06/workshop-information.html" aria-label="Read more: MediaEval Workshop Information">read more</a></p>
</div>
</span>
</div>
<hr>
</div>
<!-- Blog pagination: icon-only arrows get aria-labels; the current page is marked
     with aria-current. The href-less <a> in the disabled item is an inert placeholder. -->
<ul class="pagination">
<li class="disabled"><a aria-label="Previous page (unavailable)"><i class="material-icons" aria-hidden="true">chevron_left</i></a></li>
<li class="active"><a href="/" aria-current="page">1</a></li>
<li class="waves-effect"><a href="/blog/page2">2</a></li>
<li class="waves-effect"><a href="/blog/page3">3</a></li>
<li class="waves-effect"><a href="/blog/page2" aria-label="Next page"><i class="material-icons" aria-hidden="true">chevron_right</i></a></li>
</ul>
</section>
</article>
</div>
</div>
<footer class="page-footer green darken-4">
<div class="container">
<div class="row">
<div class="col l6 s12">
<h5 class="white-text">What is MediaEval?</h5>
<p>MediaEval is a benchmarking initiative dedicated to evaluating new algorithms for multimedia access and retrieval.
It emphasizes the ‘multi’ in multimedia and focuses on human and social aspects of multimedia tasks.</p>
<p>For more information contact Martha Larson m.larson (at) cs.ru.nl</p>
</div>
<div class="col l4 offset-l2 s12">
<h5 class="white-text">Links</h5>
<!-- Icon-only social links: the icons are aria-hidden, so without aria-label each
     link had NO accessible name. rel="noopener noreferrer" added for the new-tab links. -->
<a href="https://twitter.com/multimediaeval" target="_blank" rel="noopener noreferrer" aria-label="MediaEval on Twitter (opens in a new tab)">
<i class="fa fa-twitter fa-3x" aria-hidden="true" style="color:white;"></i>
</a>
<a href="https://github.com/multimediaeval" target="_blank" rel="noopener noreferrer" aria-label="MediaEval on GitHub (opens in a new tab)">
<i class="fa fa-github fa-3x" aria-hidden="true" style="color:white;"></i>
</a>
<a href="https://www.flickr.com/photos/69524595@N06/" target="_blank" rel="noopener noreferrer" aria-label="MediaEval photos on Flickr (opens in a new tab)">
<i class="fa fa-flickr fa-3x" aria-hidden="true" style="color:white;"></i>
</a>
<a href="https://www.youtube.com/channel/UCc-1NW1Uo2o_zI4F81iyTcw" target="_blank" rel="noopener noreferrer" aria-label="MediaEval on YouTube (opens in a new tab)">
<i class="fa fa-youtube fa-3x" aria-hidden="true" style="color:white;"></i>
</a>
</div>
</div>
</div>
<div class="footer-copyright">
<div class="container grey-text text-lighten-4">
© 2020 MediaEval Multimedia Benchmark
</div>
</div>
</footer>
<!--JavaScript at end of body for optimized loading; type="text/javascript" is the default and omitted-->
<script src="/assets/js/materialize.min.js"></script>
<script src="/assets/js/main.js"></script>
</body>
</html>