return ret;
 }
 
+/*
+ * Attach the submit's fence to the reservation object of every BO in the
+ * submit, then drop the per-object lock taken earlier in the submit path.
+ *
+ * BOs flagged ETNA_SUBMIT_BO_WRITE get the fence attached as the exclusive
+ * (write) fence; all other BOs get it as a shared (read) fence.
+ *
+ * NOTE(review): assumes every object in submit->bos[] is still locked on
+ * entry — submit_unlock_object() is called unconditionally for each one.
+ */
+static void submit_attach_object_fences(struct etnaviv_gem_submit *submit)
+{
+       int i;
+
+       for (i = 0; i < submit->nr_bos; i++) {
+               struct etnaviv_gem_object *etnaviv_obj = submit->bos[i].obj;
+
+               if (submit->bos[i].flags & ETNA_SUBMIT_BO_WRITE)
+                       reservation_object_add_excl_fence(etnaviv_obj->resv,
+                                                         submit->fence);
+               else
+                       reservation_object_add_shared_fence(etnaviv_obj->resv,
+                                                           submit->fence);
+
+               /* object lock no longer needed once the fence is attached */
+               submit_unlock_object(submit, i);
+       }
+}
+
 static void submit_unpin_objects(struct etnaviv_gem_submit *submit)
 {
        int i;
        for (i = 0; i < submit->nr_bos; i++) {
                struct etnaviv_gem_object *etnaviv_obj = submit->bos[i].obj;
 
+               /* if the GPU submit failed, objects might still be locked */
                submit_unlock_object(submit, i);
                drm_gem_object_put_unlocked(&etnaviv_obj->base);
        }
        if (ret)
                goto out;
 
+       submit_attach_object_fences(submit);
+
        cmdbuf = NULL;
 
        if (args->flags & ETNA_SUBMIT_FENCE_FD_OUT) {
 
                etnaviv_gem_mapping_reference(submit->bos[i].mapping);
                cmdbuf->bo_map[i] = submit->bos[i].mapping;
                atomic_inc(&etnaviv_obj->gpu_active);
-
-               if (submit->bos[i].flags & ETNA_SUBMIT_BO_WRITE)
-                       reservation_object_add_excl_fence(etnaviv_obj->resv,
-                                                         fence);
-               else
-                       reservation_object_add_shared_fence(etnaviv_obj->resv,
-                                                           fence);
        }
        cmdbuf->nr_bos = submit->nr_bos;
        hangcheck_timer_reset(gpu);