/*
 * Append one event entry to the guest-visible event log and notify the
 * guest with an MSI.
 *
 * @s:   IOMMU device state
 * @evt: event entry to copy into the log; AMDVI_EVENT_LEN bytes are
 *       written from it (presumably a uint64_t[2] — confirm at callers)
 *
 * If logging is disabled, or the overflow status bit is already set, the
 * event is silently dropped.  If the log is full, only the overflow bit
 * is raised and an interrupt is generated.
 */
static void amdvi_log_event(AMDVIState *s, uint64_t *evt)
{
    /* event logging not enabled */
    if (!s->evtlog_enabled || amdvi_test_mask(s, AMDVI_MMIO_STATUS,
        AMDVI_MMIO_STATUS_EVT_OVF)) {
        return;
    }

    /* event log buffer full */
    if (s->evtlog_tail >= s->evtlog_len) {
        amdvi_assign_orq(s, AMDVI_MMIO_STATUS, AMDVI_MMIO_STATUS_EVT_OVF);
        /* generate interrupt */
        amdvi_generate_msi_interrupt(s);
        return;
    }

    /*
     * BUG FIX: write the event data itself, not the address of the local
     * 'evt' pointer.  The old '&evt' copied sizeof(uint64_t *) bytes of
     * pointer value (plus adjacent stack bytes) into the guest log instead
     * of the AMDVI_EVENT_LEN bytes of the event entry.
     */
    if (dma_memory_write(&address_space_memory, s->evtlog + s->evtlog_tail,
                         evt, AMDVI_EVENT_LEN)) {
        trace_amdvi_evntlog_fail(s->evtlog, s->evtlog_tail);
    }

    s->evtlog_tail += AMDVI_EVENT_LEN;
    amdvi_assign_orq(s, AMDVI_MMIO_STATUS, AMDVI_MMIO_STATUS_COMP_INT);
    amdvi_generate_msi_interrupt(s);
}
/*
 * Deliver the IOMMU's MSI to the guest, if MSI is enabled on the device.
 * The message is fetched from the device's MSI capability (vector 0) and
 * the data word is stored little-endian at the message address with the
 * device's requester ID attached to the transaction attributes.
 */
static void amdvi_generate_msi_interrupt(AMDVIState *s)
{
    MSIMessage msg = {};
    MemTxAttrs attrs = {
        .requester_id = pci_requester_id(&s->pci.dev)
    };

    if (msi_enabled(&s->pci.dev)) {
        msg = msi_get_message(&s->pci.dev, 0);
        address_space_stl_le(&address_space_memory, msg.address, msg.data,
                             attrs, NULL);
    }
}

/*
 * Append one event entry to the guest-visible event log and notify the
 * guest with an MSI.
 *
 * @s:   IOMMU device state
 * @evt: event entry to copy into the log; AMDVI_EVENT_LEN bytes are
 *       written from it (presumably a uint64_t[2] — confirm at callers)
 *
 * If logging is disabled, or the overflow status bit is already set, the
 * event is silently dropped.  If the log is full, only the overflow bit
 * is raised and an interrupt is generated.
 */
static void amdvi_log_event(AMDVIState *s, uint64_t *evt)
{
    /* event logging not enabled */
    if (!s->evtlog_enabled || amdvi_test_mask(s, AMDVI_MMIO_STATUS,
        AMDVI_MMIO_STATUS_EVT_OVF)) {
        return;
    }

    /* event log buffer full */
    if (s->evtlog_tail >= s->evtlog_len) {
        amdvi_assign_orq(s, AMDVI_MMIO_STATUS, AMDVI_MMIO_STATUS_EVT_OVF);
        /* generate interrupt */
        amdvi_generate_msi_interrupt(s);
        return;
    }

    /*
     * BUG FIX: write the event data itself, not the address of the local
     * 'evt' pointer.  The old '&evt' copied sizeof(uint64_t *) bytes of
     * pointer value (plus adjacent stack bytes) into the guest log instead
     * of the AMDVI_EVENT_LEN bytes of the event entry.
     */
    if (dma_memory_write(&address_space_memory, s->evtlog + s->evtlog_tail,
                         evt, AMDVI_EVENT_LEN)) {
        trace_amdvi_evntlog_fail(s->evtlog, s->evtlog_tail);
    }

    s->evtlog_tail += AMDVI_EVENT_LEN;
    amdvi_assign_orq(s, AMDVI_MMIO_STATUS, AMDVI_MMIO_STATUS_COMP_INT);
    amdvi_generate_msi_interrupt(s);
}

/*
 * Write a 'length'-bit field with 'value' at absolute bit offset 'start'
 * within 'buffer', an array of 64-bit words.  The field must not straddle
 * a 64-bit word boundary (callers must ensure bitpos + length <= 64).
 *
 * @buffer: destination array of uint64_t words
 * @value:  field value (low 'length' bits are used)
 * @start:  absolute bit offset from the start of 'buffer'
 * @length: field width in bits
 */
static void amdvi_setevent_bits(uint64_t *buffer, uint64_t value, int start,
                                int length)
{
    int index = start / 64, bitpos = start % 64;
    /*
     * BUG FIX: the mask must be built at the bit position within the
     * selected word (bitpos), not at the absolute offset (start).  With
     * start >= 64 — which happens for the second word of a 16-byte event
     * entry — the old code shifted by >= the type width (undefined
     * behavior) and cleared/set the wrong bits.  Using bitpos also makes
     * the mask consistent with the '(value << bitpos)' below.
     */
    uint64_t mask = MAKE_64BIT_MASK(bitpos, length);

    buffer[index] &= ~mask;
    buffer[index] |= (value << bitpos) & mask;
}