/**
 * Flush the command parser and execute the commands.  When the parser buffer
 * is empty, the callback is not invoked.
 */
void
ilo_cp_flush_internal(struct ilo_cp *cp)
{
   const bool do_exec = !(ilo_debug & ILO_DEBUG_NOHW);
   struct intel_bo *bo;
   unsigned used;
   int err;

   bo = ilo_cp_end_batch(cp, &used);
   if (!bo)
      return;

   if (likely(do_exec)) {
      err = intel_winsys_submit_bo(cp->winsys, cp->ring,
            bo, used, cp->render_ctx, cp->one_off_flags);
   }
   else {
      err = 0;
   }

   cp->one_off_flags = 0;

   if (!err) {
      if (cp->last_submitted_bo)
         intel_bo_unreference(cp->last_submitted_bo);
      cp->last_submitted_bo = bo;
      intel_bo_reference(cp->last_submitted_bo);

      if (cp->flush_callback)
         cp->flush_callback(cp, cp->flush_callback_data);
   }

   ilo_builder_begin(&cp->builder);
}
/**
 * Submit a batch buffer for execution.  The submission is skipped when
 * INTEL_DEBUG_NOHW is set.  Returns VK_ERROR_DEVICE_LOST if the winsys
 * rejects the batch.
 */
static VkResult queue_submit_bo(struct intel_queue *queue,
                                struct intel_bo *bo,
                                VkDeviceSize used)
{
   struct intel_winsys *winsys = queue->dev->winsys;
   int err;

   if (intel_debug & INTEL_DEBUG_NOHW)
      err = 0;
   else
      err = intel_winsys_submit_bo(winsys, queue->ring, bo, used, 0);

   return (err) ? VK_ERROR_DEVICE_LOST : VK_SUCCESS;
}
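For illustration, a minimal sketch of how a caller might drive queue_submit_bo follows.  The submit_batches helper and its loop are hypothetical simplifications rather than the driver's actual vkQueueSubmit path; only the error propagation is taken from the code above.

/* Hypothetical helper, for illustration only: submit several batch
 * buffers and forward the first failure.  The real queue-submit path
 * also handles fencing and command-buffer chaining. */
static VkResult submit_batches(struct intel_queue *queue,
                               struct intel_bo **bos,
                               const VkDeviceSize *sizes,
                               uint32_t count)
{
   uint32_t i;

   for (i = 0; i < count; i++) {
      /* queue_submit_bo() maps a winsys error to VK_ERROR_DEVICE_LOST */
      VkResult ret = queue_submit_bo(queue, bos[i], sizes[i]);
      if (ret != VK_SUCCESS)
         return ret;
   }

   return VK_SUCCESS;
}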
/**
 * Flush the command parser and execute the commands.  When the parser buffer
 * is empty, the callback is not invoked.
 */
void
ilo_cp_submit_internal(struct ilo_cp *cp)
{
   const bool do_exec = !(ilo_debug & ILO_DEBUG_NOHW);
   struct intel_bo *bo;
   unsigned used;
   int err;

   bo = ilo_cp_end_batch(cp, &used);
   if (!bo)
      return;

   if (likely(do_exec)) {
      err = intel_winsys_submit_bo(cp->winsys, cp->ring,
            bo, used, cp->render_ctx, cp->one_off_flags);
   }
   else {
      err = 0;
   }

   cp->one_off_flags = 0;

   if (!err) {
      bool guilty;

      intel_bo_unref(cp->last_submitted_bo);
      cp->last_submitted_bo = intel_bo_ref(bo);

      guilty = ilo_cp_detect_hang(cp);

      if (unlikely((ilo_debug & ILO_DEBUG_BATCH) || guilty)) {
         ilo_builder_decode(&cp->builder);
         if (guilty)
            abort();
      }

      if (cp->submit_callback)
         cp->submit_callback(cp, cp->submit_callback_data);
   }

   ilo_builder_begin(&cp->builder);
}
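As a rough sketch of how the submit_callback fields above are meant to be used, the following shows a context registering a per-submission hook.  The struct ilo_context type and both function names are assumptions made for illustration; only the cp->submit_callback and cp->submit_callback_data fields come from the code above.

/* Illustrative sketch only: hook a per-context callback into the
 * command parser so the context is notified after each submission.
 * struct ilo_context and both function names are hypothetical. */
static void
ilo_context_cp_submitted(struct ilo_cp *cp, void *data)
{
   struct ilo_context *ilo = (struct ilo_context *) data;

   /* e.g. mark cached hardware state dirty so it is re-emitted
    * into the next batch */
   (void) cp;
   (void) ilo;
}

static void
ilo_context_hook_cp(struct ilo_context *ilo, struct ilo_cp *cp)
{
   cp->submit_callback = ilo_context_cp_submitted;
   cp->submit_callback_data = (void *) ilo;
}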