        if (evlist->mmap[idx].base != NULL) {
                munmap(evlist->mmap[idx].base, evlist->mmap_len);
                evlist->mmap[idx].base = NULL;
+               evlist->mmap[idx].fd = -1;
                atomic_set(&evlist->mmap[idx].refcnt, 0);
        }
        auxtrace_mmap__munmap(&evlist->mmap[idx].auxtrace_mmap);
 
 static int perf_evlist__alloc_mmap(struct perf_evlist *evlist)
 {
+       int i;
+
        evlist->nr_mmaps = cpu_map__nr(evlist->cpus);
        if (cpu_map__empty(evlist->cpus))
                evlist->nr_mmaps = thread_map__nr(evlist->threads);
        evlist->mmap = zalloc(evlist->nr_mmaps * sizeof(struct perf_mmap));
-       return evlist->mmap != NULL ? 0 : -ENOMEM;
+       if (!evlist->mmap)
+               return -ENOMEM;
+
+       for (i = 0; i < evlist->nr_mmaps; i++)
+               evlist->mmap[i].fd = -1;
+       return 0;
 }
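
Why the explicit -1: zalloc() zero-fills the new array, and 0 is a valid file descriptor, so a slot that has never been mmap'ed would otherwise be indistinguishable from one referring to fd 0. Initializing fd to -1 here, and resetting it in the munmap path above, gives callers an unambiguous "no ring buffer mapped" marker. A minimal sketch of how a consumer could rely on that sentinel, assuming the struct perf_mmap definition used by this file (the helper name below is hypothetical, not part of the patch):

        /* Hypothetical helper: true only once the mmap path below has
         * installed a real descriptor for this slot; a freshly zalloc'ed
         * or munmap'ed slot carries fd == -1 and base == NULL.
         */
        static bool perf_mmap__is_mapped(const struct perf_mmap *map)
        {
                return map->fd >= 0 && map->base != NULL;
        }
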
 
                evlist->mmap[idx].base = NULL;
                return -1;
        }
+       evlist->mmap[idx].fd = fd;
 
        if (auxtrace_mmap__mmap(&evlist->mmap[idx].auxtrace_mmap,
                                &mp->auxtrace_mp, evlist->mmap[idx].base, fd))