Lines Matching refs:sc_admin_q
279 nvme_write8(sc, NVME_ASQ, NVME_DMA_DVA(sc->sc_admin_q->q_sq_dmamem));
282 nvme_write8(sc, NVME_ACQ, NVME_DMA_DVA(sc->sc_admin_q->q_cq_dmamem));
286 nvme_write4(sc, NVME_AQA, NVME_AQA_ACQS(sc->sc_admin_q->q_entries) |
287 NVME_AQA_ASQS(sc->sc_admin_q->q_entries));
404 sc->sc_admin_q = nvme_q_alloc(sc, NVME_ADMIN_Q, adminq_entries,
406 if (sc->sc_admin_q == NULL) {
411 if (sc->sc_intr_establish(sc, NVME_ADMIN_Q, sc->sc_admin_q))
432 nvme_ccbs_free(sc->sc_admin_q);
433 nvme_ccbs_alloc(sc->sc_admin_q, sc->sc_admin_q->q_entries);
485 nvme_q_free(sc, sc->sc_admin_q);
581 nvme_q_free(sc, sc->sc_admin_q);
604 nvme_q_reset(sc, sc->sc_admin_q);
605 if (sc->sc_intr_establish(sc, NVME_ADMIN_Q, sc->sc_admin_q)) {
715 ccb = nvme_ccb_get(sc->sc_admin_q, false);
720 nvme_ccb_put(sc->sc_admin_q, ccb);
734 rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_sqe_fill, NVME_TIMO_IDENT);
737 nvme_ccb_put(sc->sc_admin_q, ccb);
981 struct nvme_queue *q = sc->sc_admin_q;
1116 struct nvme_queue *q = sc->sc_admin_q;
1274 q = is_adminq ? sc->sc_admin_q : nvme_get_q(sc);
1584 ccb = nvme_ccb_get(sc->sc_admin_q, false);
1595 rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_fill_identify,
1599 nvme_ccb_put(sc->sc_admin_q, ccb);
1646 ccb = nvme_ccb_get(sc->sc_admin_q, false);
1661 rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_sqe_fill, NVME_TIMO_QOP);
1676 rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_sqe_fill, NVME_TIMO_QOP);
1680 nvme_ccb_put(sc->sc_admin_q, ccb);
1687 nvme_ccb_put(sc->sc_admin_q, ccb);
1698 ccb = nvme_ccb_get(sc->sc_admin_q, false);
1708 rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_sqe_fill, NVME_TIMO_QOP);
1719 rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_sqe_fill, NVME_TIMO_QOP);
1724 nvme_ccb_put(sc->sc_admin_q, ccb);
1754 ccb = nvme_ccb_get(sc->sc_admin_q, false);
1769 rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_pt_fill, NVME_TIMO_QOP);
2057 nvme_q_complete(sc, sc->sc_admin_q);
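
The matches above fall into a few recurring patterns; hedged sketches of each follow.

Lines 279-287 are the controller-enable path advertising the admin queue to the hardware: the DMA addresses of the admin submission and completion rings go into the 64-bit ASQ and ACQ registers, and both ring sizes go into AQA. A minimal sketch of just that step, reusing only the accessors and macros visible in the listing; the wrapper function name is illustrative, and the rest of the enable sequence (CC.EN handshake, register ordering) is omitted and assumed to live in the driver:

/*
 * Sketch only: programming the admin queue base addresses and sizes,
 * as on lines 279-287 of the listing.
 */
static void
nvme_admin_q_program(struct nvme_softc *sc)
{
	struct nvme_queue *q = sc->sc_admin_q;

	/* DMA addresses of the admin submission and completion rings. */
	nvme_write8(sc, NVME_ASQ, NVME_DMA_DVA(q->q_sq_dmamem));
	nvme_write8(sc, NVME_ACQ, NVME_DMA_DVA(q->q_cq_dmamem));

	/* Both rings share the same entry count (lines 286-287). */
	nvme_write4(sc, NVME_AQA, NVME_AQA_ACQS(q->q_entries) |
	    NVME_AQA_ASQS(q->q_entries));
}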
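
Lines 715-737, 1584-1599, 1646-1724 and 1754-1769 all use the same synchronous admin-command idiom: reserve a ccb from sc_admin_q, let nvme_poll() submit the SQE produced by the fill callback (nvme_sqe_fill, nvme_fill_identify or nvme_pt_fill in the listing) and busy-wait for its completion, then return the ccb. A hedged sketch of that idiom follows; the function name, the EAGAIN return, the ccb_done/ccb_cookie fields and nvme_empty_done are assumptions about the driver's ccb layout rather than something shown in the listing:

/*
 * Sketch only: the get / poll / put pattern seen around lines 715-737.
 */
static int
nvme_admin_sync_cmd(struct nvme_softc *sc, struct nvme_sqe *sqe)
{
	struct nvme_ccb *ccb;
	int rv;

	/* false: same flag as every call site in the listing (assumed "don't wait"). */
	ccb = nvme_ccb_get(sc->sc_admin_q, false);
	if (ccb == NULL)
		return EAGAIN;		/* assumed errno convention */

	/* Assumption: nvme_sqe_fill() copies the SQE found in ccb_cookie. */
	ccb->ccb_done = nvme_empty_done;
	ccb->ccb_cookie = sqe;

	rv = nvme_poll(sc, sc->sc_admin_q, ccb, nvme_sqe_fill, NVME_TIMO_QOP);

	nvme_ccb_put(sc->sc_admin_q, ccb);
	return rv;
}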
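
Line 2057 is the completion side: interrupt handling drains the admin queue with the same nvme_q_complete() helper used for the I/O queues. A sketch of a handler doing only that; the handler name and the return convention are illustrative, and only the nvme_q_complete(sc, sc->sc_admin_q) call itself comes from the listing:

/*
 * Sketch only: draining admin-queue completions, as on line 2057.
 */
static int
nvme_admin_intr_sketch(void *xsc)
{
	struct nvme_softc *sc = xsc;

	/* Assumed: nvme_q_complete() reports whether it consumed any CQ entries. */
	return nvme_q_complete(sc, sc->sc_admin_q);
}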