
Lines Matching defs:ae

199 qat_ae_write_4(struct qat_softc *sc, u_char ae, bus_size_t offset,
205 qat_ae_local_write_4(sc, ae, offset, value);
206 if ((qat_ae_local_read_4(sc, ae, LOCAL_CSR_STATUS) &
213 "couldn't write AE CSR: ae 0x%hhx offset 0x%lx\n", ae, (long)offset);
218 qat_ae_read_4(struct qat_softc *sc, u_char ae, bus_size_t offset,
225 v = qat_ae_local_read_4(sc, ae, offset);
226 if ((qat_ae_local_read_4(sc, ae, LOCAL_CSR_STATUS) &
234 "couldn't read AE CSR: ae 0x%hhx offset 0x%lx\n", ae, (long)offset);
239 qat_ae_ctx_indr_write(struct qat_softc *sc, u_char ae, uint32_t ctx_mask,
255 qat_ae_read_4(sc, ae, CSR_CTX_POINTER, &ctxptr);
259 qat_ae_write_4(sc, ae, CSR_CTX_POINTER, ctx);
260 qat_ae_write_4(sc, ae, offset, value);
262 qat_ae_write_4(sc, ae, CSR_CTX_POINTER, ctxptr);
266 qat_ae_ctx_indr_read(struct qat_softc *sc, u_char ae, uint32_t ctx,
283 qat_ae_read_4(sc, ae, CSR_CTX_POINTER, &ctxptr);
286 qat_ae_write_4(sc, ae, CSR_CTX_POINTER, ctx);
288 error = qat_ae_read_4(sc, ae, offset, value);
293 qat_ae_write_4(sc, ae, CSR_CTX_POINTER, ctxptr);
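qat_ae_ctx_indr_write() and qat_ae_ctx_indr_read() show how per-context CSRs are reached: the current CSR_CTX_POINTER is saved, pointed at the context of interest, the indirect CSR is accessed through the ordinary qat_ae_read_4()/qat_ae_write_4() path, and the pointer is restored. A hedged sketch of the read side, with error handling trimmed:

/* Sketch of indirect per-context CSR access via CSR_CTX_POINTER. */
static int
example_ae_ctx_indr_read(struct qat_softc *sc, u_char ae, uint32_t ctx,
    bus_size_t offset, uint32_t *value)
{
	uint32_t ctxptr;
	int error;

	qat_ae_read_4(sc, ae, CSR_CTX_POINTER, &ctxptr);	/* save */
	qat_ae_write_4(sc, ae, CSR_CTX_POINTER, ctx);		/* select ctx */
	error = qat_ae_read_4(sc, ae, offset, value);		/* indirect read */
	qat_ae_write_4(sc, ae, CSR_CTX_POINTER, ctxptr);	/* restore */
	return error;
}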
351 qat_aereg_rel_data_write(struct qat_softc *sc, u_char ae, u_char ctx,
373 qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
413 return qat_ae_exec_ucode(sc, ae, ctx, inst, ninst, 1, ninst * 5, NULL);
417 qat_aereg_rel_data_read(struct qat_softc *sc, u_char ae, u_char ctx,
437 qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
466 /* backup shared control store bit, and force AE to none-shared mode */
468 qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &misc);
470 qat_ae_get_shared_ustore_ae(ae, &nae);
476 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, nmisc);
479 qat_ae_read_4(sc, ae, ACTIVE_CTX_STATUS, &savctx);
480 qat_ae_read_4(sc, ae, CTX_ARB_CNTL, &ctxarbctl);
482 qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
489 qat_ae_write_4(sc, ae, ACTIVE_CTX_STATUS,
492 if ((error = qat_ae_ucode_read(sc, ae, 0, 1, &savucode)) != 0) {
494 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, misc);
498 qat_ae_write_4(sc, ae, ACTIVE_CTX_STATUS,
501 qat_ae_write_4(sc, ae, CTX_ARB_CNTL, ctxarbctl);
507 qat_ae_write_4(sc, ae, CTX_ENABLES,
511 qat_ae_read_4(sc, ae, USTORE_ADDRESS, &ustore_addr);
517 qat_ae_write_4(sc, ae, USTORE_ADDRESS, uaddr);
523 qat_ae_write_4(sc, ae, USTORE_DATA_LOWER, ulo);
526 qat_ae_write_4(sc, ae, USTORE_DATA_UPPER, uhi);
529 qat_ae_write_4(sc, ae, USTORE_ADDRESS, uaddr);
532 qat_ae_wait_num_cycles(sc, ae, 0x8, 0);
536 qat_ae_read_4(sc, ae, ALU_OUT, value);
539 qat_ae_write_4(sc, ae, USTORE_ADDRESS, ustore_addr);
542 error = qat_ae_ucode_write(sc, ae, 0, 1, &savucode);
546 qat_ae_write_4(sc, ae, ACTIVE_CTX_STATUS,
550 qat_ae_write_4(sc, ae, CTX_ARB_CNTL, ctxarbctl);
553 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, misc);
555 qat_ae_write_4(sc, ae, CTX_ENABLES, ctxen);
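The qat_aereg_rel_data_read() fragment above (lines 466-555) reads a relative GP register without the AE's cooperation: it saves one microword plus the relevant context CSRs, drops the AE pair into none-shared control-store mode, injects a single instruction that routes the register through the ALU, lets it run for a few cycles, and collects the result from ALU_OUT before restoring everything. The core of the trick, condensed; USTORE_ADDRESS_ECS and INSN_MOVE_GPR_TO_ALU are placeholders rather than real encodings, and the save/restore of the microword and CSRs is left to the surrounding code:

/* Condensed illustration of the ALU_OUT read-back used above; the real
 * function additionally saves and restores the overwritten microword and the
 * context CSRs.  USTORE_ADDRESS_ECS and INSN_MOVE_GPR_TO_ALU are assumed
 * placeholders. */
static void
example_read_reg_via_alu(struct qat_softc *sc, u_char ae, uint32_t *value)
{
	uint32_t ustore_addr, uaddr = USTORE_ADDRESS_ECS;
	uint64_t inst = INSN_MOVE_GPR_TO_ALU;	/* "move GPR through ALU" */

	qat_ae_read_4(sc, ae, USTORE_ADDRESS, &ustore_addr);	/* save */
	qat_ae_write_4(sc, ae, USTORE_ADDRESS, uaddr);
	qat_ae_write_4(sc, ae, USTORE_DATA_LOWER, (uint32_t)inst);
	qat_ae_write_4(sc, ae, USTORE_DATA_UPPER, (uint32_t)(inst >> 32));
	qat_ae_write_4(sc, ae, USTORE_ADDRESS, uaddr);

	qat_ae_wait_num_cycles(sc, ae, 0x8, 0);	/* let the instruction run */
	qat_ae_read_4(sc, ae, ALU_OUT, value);	/* harvest the result */

	qat_ae_write_4(sc, ae, USTORE_ADDRESS, ustore_addr);	/* restore */
}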
561 qat_aereg_rel_rdxfer_write(struct qat_softc *sc, u_char ae, u_char ctx,
575 error = qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
598 qat_ae_xfer_write_4(sc, ae, addr, value);
602 qat_ae_xfer_write_4(sc, ae, addr + dr_offset, value);
612 qat_aereg_rel_wrxfer_write(struct qat_softc *sc, u_char ae, u_char ctx,
622 qat_aereg_rel_nn_write(struct qat_softc *sc, u_char ae, u_char ctx,
632 qat_aereg_abs_to_rel(struct qat_softc *sc, u_char ae,
637 qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
652 qat_aereg_abs_data_write(struct qat_softc *sc, u_char ae,
659 qat_aereg_abs_to_rel(sc, ae, absreg, &relreg, &ctx);
664 error = qat_aereg_rel_data_write(sc, ae, ctx, AEREG_GPA_REL,
669 error = qat_aereg_rel_data_write(sc, ae, ctx, AEREG_GPB_REL,
674 error = qat_aereg_rel_rdxfer_write(sc, ae, ctx, AEREG_DR_RD_REL,
679 error = qat_aereg_rel_rdxfer_write(sc, ae, ctx, AEREG_SR_RD_REL,
684 error = qat_aereg_rel_wrxfer_write(sc, ae, ctx, AEREG_DR_WR_REL,
689 error = qat_aereg_rel_wrxfer_write(sc, ae, ctx, AEREG_SR_WR_REL,
696 error = qat_aereg_rel_nn_write(sc, ae, ctx, AEREG_NEIGH_REL,
707 qat_ae_enable_ctx(struct qat_softc *sc, u_char ae, u_int ctx_mask)
711 qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
721 qat_ae_write_4(sc, ae, CTX_ENABLES, ctxen);
725 qat_ae_disable_ctx(struct qat_softc *sc, u_char ae, u_int ctx_mask)
729 qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
732 qat_ae_write_4(sc, ae, CTX_ENABLES, ctxen);
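Enabling and disabling contexts is a plain read-modify-write of CTX_ENABLES, as lines 707-732 show. A hedged sketch, assuming CTX_ENABLES_ENABLE names the per-context enable field:

/* Sketch of the CTX_ENABLES read-modify-write; CTX_ENABLES_ENABLE is an
 * assumed name for the per-context enable bits. */
static void
example_ae_enable_ctx(struct qat_softc *sc, u_char ae, u_int ctx_mask)
{
	uint32_t ctxen;

	qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
	ctxen |= __SHIFTIN(ctx_mask & AE_ALL_CTX, CTX_ENABLES_ENABLE);
	qat_ae_write_4(sc, ae, CTX_ENABLES, ctxen);
}

static void
example_ae_disable_ctx(struct qat_softc *sc, u_char ae, u_int ctx_mask)
{
	uint32_t ctxen;

	qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
	ctxen &= ~__SHIFTIN(ctx_mask & AE_ALL_CTX, CTX_ENABLES_ENABLE);
	qat_ae_write_4(sc, ae, CTX_ENABLES, ctxen);
}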
736 qat_ae_write_ctx_mode(struct qat_softc *sc, u_char ae, u_char mode)
740 qat_ae_read_4(sc, ae, CTX_ENABLES, &val);
749 qat_ae_write_4(sc, ae, CTX_ENABLES, nval);
753 qat_ae_write_nn_mode(struct qat_softc *sc, u_char ae, u_char mode)
757 qat_ae_read_4(sc, ae, CTX_ENABLES, &val);
766 qat_ae_write_4(sc, ae, CTX_ENABLES, nval);
770 qat_ae_write_lm_mode(struct qat_softc *sc, u_char ae,
776 qat_ae_read_4(sc, ae, CTX_ENABLES, &val);
797 qat_ae_write_4(sc, ae, CTX_ENABLES, nval);
801 qat_ae_write_shared_cs_mode0(struct qat_softc *sc, u_char ae, u_char mode)
805 qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &val);
813 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, nval);
817 qat_ae_write_shared_cs_mode(struct qat_softc *sc, u_char ae, u_char mode)
821 qat_ae_get_shared_ustore_ae(ae, &nae);
823 qat_ae_write_shared_cs_mode0(sc, ae, mode);
831 qat_ae_set_reload_ustore(struct qat_softc *sc, u_char ae,
854 QAT_AE(sc, ae).qae_ustore_dram_addr = ustore_dram_addr;
856 QAT_AE(sc, ae).qae_reload_size = reload_size;
858 qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &val);
863 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, val);
869 qat_ae_get_status(struct qat_softc *sc, u_char ae)
874 error = qat_ae_read_4(sc, ae, CTX_ENABLES, &val);
878 qat_ae_read_4(sc, ae, ACTIVE_CTX_STATUS, &val);
887 qat_ae_is_active(struct qat_softc *sc, u_char ae)
891 if (qat_ae_get_status(sc, ae) != QAT_AE_DISABLED)
894 qat_ae_read_4(sc, ae, ACTIVE_CTX_STATUS, &val);
903 qat_ae_wait_num_cycles(struct qat_softc *sc, u_char ae, int cycles, int check)
908 qat_ae_read_4(sc, ae, PROFILE_COUNT, &cnt);
913 qat_ae_read_4(sc, ae, PROFILE_COUNT, &cnt);
933 if (qat_ae_read_4(sc, ae, ACTIVE_CTX_STATUS,
941 if (check && qat_ae_read_4(sc, ae, ACTIVE_CTX_STATUS, &actx) == 0) {
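qat_ae_wait_num_cycles() busy-waits on PROFILE_COUNT until the requested number of AE cycles has elapsed, and with check set it also examines ACTIVE_CTX_STATUS afterwards (lines 933-941). A stripped-down sketch of the counting loop; the 0xffff wrap mask is an assumption about the counter width, and the status check is omitted:

/* Sketch of the PROFILE_COUNT busy-wait; the 16-bit wrap handling is an
 * assumption, and the optional ACTIVE_CTX_STATUS check is left out. */
static void
example_ae_wait_cycles(struct qat_softc *sc, u_char ae, int cycles)
{
	uint32_t cnt;
	int start, elapsed;

	qat_ae_read_4(sc, ae, PROFILE_COUNT, &cnt);
	start = cnt & 0xffff;
	do {
		qat_ae_read_4(sc, ae, PROFILE_COUNT, &cnt);
		elapsed = ((cnt & 0xffff) - start) & 0xffff;
	} while (elapsed < cycles);
}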
954 u_char ae;
960 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
961 struct qat_ae *qae = &sc->sc_ae[ae];
989 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
992 qat_ae_read_4(sc, ae, SIGNATURE_ENABLE, &val);
994 qat_ae_write_4(sc, ae, SIGNATURE_ENABLE, val);
1012 u_char ae;
1014 for (ae = 0; ae < sc->sc_ae_num; ae++) {
1015 if ((sc->sc_ae_mask & (1 << ae)) == 0)
1018 error = qat_aefw_start(sc, ae, 0xff);
1022 aprint_verbose_dev(sc->sc_dev, "Started AE %d\n", ae);
1042 u_char ae;
1059 /* Enable clock for AE and QAT */
1073 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
1078 qat_ae_write_4(sc, ae, CTX_ENABLES,
1082 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX,
1087 qat_ae_write_4(sc, ae, CTX_ARB_CNTL,
1091 qat_ae_write_4(sc, ae, CC_ENABLE,
1093 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX,
1096 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX,
1119 int error, times, ae;
1122 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
1127 error = qat_ae_read_4(sc, ae, PROFILE_COUNT, &cnt);
1130 "couldn't access AE %d CSR\n", ae);
1136 error = qat_ae_read_4(sc, ae,
1140 "couldn't access AE %d CSR\n", ae);
1150 "AE %d CSR is useless\n", ae);
1163 u_char ae;
1172 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
1175 qat_ae_write_4(sc, ae, TIMESTAMP_LOW, 0);
1176 qat_ae_write_4(sc, ae, TIMESTAMP_HIGH, 0);
1191 u_char ae;
1193 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
1198 qat_aereg_abs_data_write(sc, ae, AEREG_SR_RD_ABS,
1200 qat_aereg_abs_data_write(sc, ae, AEREG_DR_RD_ABS,
1212 u_char ae;
1215 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
1220 qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &val);
1222 ae, AE_MISC_CONTROL, val);
1226 qat_ae_read_4(sc, ae, CTX_ENABLES, &val);
1230 qat_ae_write_4(sc, ae, CTX_ENABLES, val);
1233 qat_ae_ucode_write(sc, ae, 0, __arraycount(ae_clear_gprs_inst),
1237 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX, CTX_STS_INDIRECT,
1241 qat_ae_read_4(sc, ae, ACTIVE_CTX_STATUS, &saved_ctx);
1244 qat_ae_write_4(sc, ae, ACTIVE_CTX_STATUS, 0);
1247 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX,
1251 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX,
1253 qat_ae_write_4(sc, ae, CTX_SIG_EVENTS_ACTIVE, 0);
1255 qat_ae_enable_ctx(sc, ae, AE_ALL_CTX);
1258 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
1261 /* wait for AE to finish */
1263 rv = qat_ae_wait_num_cycles(sc, ae, AE_EXEC_CYCLE, 1);
1270 qat_ae_disable_ctx(sc, ae, AE_ALL_CTX);
1272 qat_ae_write_4(sc, ae, ACTIVE_CTX_STATUS,
1275 qat_ae_write_4(sc, ae, CTX_ENABLES, CTX_ENABLES_INIT);
1277 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX,
1280 qat_ae_write_4(sc, ae, CTX_ARB_CNTL, CTX_ARB_CNTL_INIT);
1282 qat_ae_write_4(sc, ae, CC_ENABLE, CC_ENABLE_INIT);
1283 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX,
1285 qat_ae_ctx_indr_write(sc, ae, AE_ALL_CTX, CTX_SIG_EVENTS_INDIRECT,
1293 qat_ae_get_shared_ustore_ae(u_char ae, u_char *nae)
1295 if (ae & 0x1)
1296 *nae = ae - 1;
1298 *nae = ae + 1;
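qat_ae_get_shared_ustore_ae() simply pairs each even AE with the next odd one (and vice versa), since neighbouring engines can share a control store. Call sites such as the ucode and register read paths above use it to decide whether the shared-control-store bit in AE_MISC_CONTROL must be cleared before touching the ustore; roughly, with AE_MISC_CONTROL_SHARE_CS as an assumed name for that bit:

/* Illustration only: force an AE out of shared control-store mode before a
 * ucode access when its even/odd partner is present.
 * AE_MISC_CONTROL_SHARE_CS is an assumed bit name. */
static uint32_t
example_force_none_shared(struct qat_softc *sc, u_char ae)
{
	uint32_t misc;
	u_char nae;

	qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &misc);
	qat_ae_get_shared_ustore_ae(ae, &nae);
	if (sc->sc_ae_mask & (1 << nae))
		qat_ae_write_4(sc, ae, AE_MISC_CONTROL,
		    misc & ~AE_MISC_CONTROL_SHARE_CS);
	return misc;	/* callers restore this saved value afterwards */
}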
1346 qat_ae_ucode_write(struct qat_softc *sc, u_char ae, u_int uaddr, u_int ninst,
1353 qat_ae_read_4(sc, ae, USTORE_ADDRESS, &ustore_addr);
1356 qat_ae_write_4(sc, ae, USTORE_ADDRESS, uaddr);
1362 qat_ae_write_4(sc, ae, USTORE_DATA_LOWER, ulo);
1364 qat_ae_write_4(sc, ae, USTORE_DATA_UPPER, uhi);
1368 qat_ae_write_4(sc, ae, USTORE_ADDRESS, ustore_addr);
1374 qat_ae_ucode_read(struct qat_softc *sc, u_char ae, u_int uaddr, u_int ninst,
1381 if (qat_ae_get_status(sc, ae) != QAT_AE_DISABLED)
1384 /* determine whether its neighbour AE runs in shared control store mode */
1386 qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &misc);
1388 qat_ae_get_shared_ustore_ae(ae, &nae);
1398 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, misc & 0xfffffffb);
1403 qat_ae_read_4(sc, ae, USTORE_ADDRESS, &ustore_addr);
1407 qat_ae_write_4(sc, ae, USTORE_ADDRESS, uaddr);
1410 qat_ae_read_4(sc, ae, USTORE_DATA_LOWER, &ulo);
1411 qat_ae_read_4(sc, ae, USTORE_DATA_UPPER, &uhi);
1417 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, misc);
1418 qat_ae_write_4(sc, ae, USTORE_ADDRESS, ustore_addr);
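qat_ae_ucode_write() and qat_ae_ucode_read() stream 64-bit microwords through the ustore window: save USTORE_ADDRESS, set the target address, transfer each instruction as a lower/upper word pair (the upper-word access advances the address), then restore USTORE_ADDRESS. A hedged sketch of the write side, with USTORE_ADDRESS_ECS standing in for whatever enable bit the driver actually ORs into the address:

/* Sketch of loading microwords through the ustore window.
 * USTORE_ADDRESS_ECS is an assumed name for the control-store enable bit. */
static void
example_ae_ucode_write(struct qat_softc *sc, u_char ae, u_int uaddr,
    u_int ninst, const uint64_t *ucode)
{
	uint32_t ustore_addr;
	u_int i;

	qat_ae_read_4(sc, ae, USTORE_ADDRESS, &ustore_addr);	/* save */
	qat_ae_write_4(sc, ae, USTORE_ADDRESS, uaddr | USTORE_ADDRESS_ECS);

	for (i = 0; i < ninst; i++) {
		uint32_t ulo = (uint32_t)ucode[i];
		uint32_t uhi = (uint32_t)(ucode[i] >> 32);

		/* lower word first; writing the upper word commits the
		 * instruction and advances the address */
		qat_ae_write_4(sc, ae, USTORE_DATA_LOWER, ulo);
		qat_ae_write_4(sc, ae, USTORE_DATA_UPPER, uhi);
	}

	qat_ae_write_4(sc, ae, USTORE_ADDRESS, ustore_addr);	/* restore */
}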
1496 qat_ae_exec_ucode(struct qat_softc *sc, u_char ae, u_char ctx,
1512 if (qat_ae_is_active(sc, ae))
1516 printf("%s: ae %d ctx %d ninst %d code 0x%016llx 0x%016llx\n",
1517 __func__, ae, ctx, ninst, ucode[0], ucode[ninst-1]);
1521 qat_ae_ctx_indr_read(sc, ae, ctx, LM_ADDR_0_INDIRECT, &indr_lm_addr_0);
1522 qat_ae_ctx_indr_read(sc, ae, ctx, LM_ADDR_1_INDIRECT, &indr_lm_addr_1);
1523 qat_ae_ctx_indr_read(sc, ae, ctx, INDIRECT_LM_ADDR_0_BYTE_INDEX,
1525 qat_ae_ctx_indr_read(sc, ae, ctx, INDIRECT_LM_ADDR_1_BYTE_INDEX,
1528 /* backup shared control store bit, and force AE to none-shared mode */
1530 qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &misc);
1533 qat_ae_get_shared_ustore_ae(ae, &nae);
1538 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, nmisc);
1542 error = qat_ae_ucode_read(sc, ae, 0, ninst, savucode);
1544 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, misc);
1550 qat_ae_ctx_indr_read(sc, ae, ctx, CTX_WAKEUP_EVENTS_INDIRECT,
1553 qat_ae_ctx_indr_read(sc, ae, ctx, CTX_STS_INDIRECT, &savpc);
1557 qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);
1560 qat_ae_read_4(sc, ae, CC_ENABLE, &savcc);
1562 qat_ae_read_4(sc, ae, ACTIVE_CTX_STATUS, &savctx);
1563 qat_ae_read_4(sc, ae, CTX_ARB_CNTL, &ctxarbctl);
1566 qat_ae_ctx_indr_read(sc, ae, ctx, FUTURE_COUNT_SIGNAL_INDIRECT,
1568 qat_ae_ctx_indr_read(sc, ae, ctx, CTX_SIG_EVENTS_INDIRECT, &indr_sig);
1569 qat_ae_read_4(sc, ae, CTX_SIG_EVENTS_ACTIVE, &active_sig);
1572 qat_ae_write_4(sc, ae, CTX_ENABLES,
1576 qat_ae_ucode_write(sc, ae, 0, ninst, ucode);
1578 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_STS_INDIRECT, 0);
1580 qat_ae_write_4(sc, ae, ACTIVE_CTX_STATUS,
1585 qat_ae_write_4(sc, ae, CC_ENABLE, savcc & 0xffffdfff);
1589 qat_ae_ctx_indr_write(sc, ae, 1 << ctx,
1593 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_SIG_EVENTS_INDIRECT, 0);
1594 qat_ae_write_4(sc, ae, CTX_SIG_EVENTS_ACTIVE, 0);
1597 qat_ae_enable_ctx(sc, ae, 1 << ctx);
1600 if (qat_ae_wait_num_cycles(sc, ae, max_cycles, 1) != 0)
1607 qat_ae_ctx_indr_read(sc, ae, ctx, CTX_STS_INDIRECT,
1615 qat_ae_ctx_indr_read(sc, ae, ctx, CTX_STS_INDIRECT,
1624 qat_ae_disable_ctx(sc, ae, 1 << ctx);
1627 qat_ae_ucode_write(sc, ae, 0, ninst, savucode);
1630 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_WAKEUP_EVENTS_INDIRECT,
1632 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_STS_INDIRECT, savpc);
1635 other bits might be changed by the AE code snippet */
1636 qat_ae_read_4(sc, ae, AE_MISC_CONTROL, &misc);
1641 qat_ae_write_4(sc, ae, AE_MISC_CONTROL, nmisc);
1643 qat_ae_write_4(sc, ae, CC_ENABLE, savcc);
1645 qat_ae_write_4(sc, ae, ACTIVE_CTX_STATUS,
1648 qat_ae_write_4(sc, ae, CTX_ARB_CNTL, ctxarbctl);
1650 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, LM_ADDR_0_INDIRECT,
1652 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, LM_ADDR_1_INDIRECT,
1654 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, INDIRECT_LM_ADDR_0_BYTE_INDEX,
1656 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, INDIRECT_LM_ADDR_1_BYTE_INDEX,
1660 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, FUTURE_COUNT_SIGNAL_INDIRECT,
1662 qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_SIG_EVENTS_INDIRECT,
1664 qat_ae_write_4(sc, ae, CTX_SIG_EVENTS_ACTIVE, active_sig);
1667 qat_ae_write_4(sc, ae, CTX_ENABLES, ctxen);
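Lines 1496-1667 are the generic "borrow the AE" routine used by the register accessors above: refuse if the AE is running, snapshot the microcode and every per-context CSR the snippet could disturb, force none-shared control store, load the snippet at ustore address 0, zero the context's PC, run it for at most max_cycles, then restore the saved microcode and CSRs. A compressed outline of that flow, reusing the helper names from this listing but not the real function's signature, with the error paths and CSR bit manipulation omitted:

/* Compressed outline of the exec-ucode sequence; not the real signature.
 * savucode is a caller-supplied scratch buffer of at least ninst words. */
static int
example_ae_exec_snippet(struct qat_softc *sc, u_char ae, u_char ctx,
    uint64_t *ucode, u_int ninst, u_int max_cycles, uint64_t *savucode)
{
	uint32_t savpc, savcc, ctxarbctl, ctxen, wakeup;
	int error = 0;

	if (qat_ae_is_active(sc, ae))
		return EBUSY;

	/* 1. save the microcode and CSRs the snippet will clobber */
	qat_ae_ucode_read(sc, ae, 0, ninst, savucode);
	qat_ae_ctx_indr_read(sc, ae, ctx, CTX_WAKEUP_EVENTS_INDIRECT, &wakeup);
	qat_ae_ctx_indr_read(sc, ae, ctx, CTX_STS_INDIRECT, &savpc);
	qat_ae_read_4(sc, ae, CC_ENABLE, &savcc);
	qat_ae_read_4(sc, ae, CTX_ARB_CNTL, &ctxarbctl);
	qat_ae_read_4(sc, ae, CTX_ENABLES, &ctxen);

	/* 2. load the snippet at ustore address 0 and reset the ctx PC */
	qat_ae_ucode_write(sc, ae, 0, ninst, ucode);
	qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_STS_INDIRECT, 0);

	/* 3. run it and wait for completion */
	qat_ae_enable_ctx(sc, ae, 1 << ctx);
	if (qat_ae_wait_num_cycles(sc, ae, max_cycles, 1) != 0)
		error = ETIMEDOUT;
	qat_ae_disable_ctx(sc, ae, 1 << ctx);

	/* 4. put everything back */
	qat_ae_ucode_write(sc, ae, 0, ninst, savucode);
	qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_WAKEUP_EVENTS_INDIRECT,
	    wakeup);
	qat_ae_ctx_indr_write(sc, ae, 1 << ctx, CTX_STS_INDIRECT, savpc);
	qat_ae_write_4(sc, ae, CC_ENABLE, savcc);
	qat_ae_write_4(sc, ae, CTX_ARB_CNTL, ctxarbctl);
	qat_ae_write_4(sc, ae, CTX_ENABLES, ctxen);
	return error;
}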
1673 qat_ae_exec_ucode_init_lm(struct qat_softc *sc, u_char ae, u_char ctx,
1679 qat_aereg_rel_data_read(sc, ae, ctx, AEREG_GPA_REL, 0, gpr_a0);
1680 qat_aereg_rel_data_read(sc, ae, ctx, AEREG_GPA_REL, 1, gpr_a1);
1681 qat_aereg_rel_data_read(sc, ae, ctx, AEREG_GPA_REL, 2, gpr_a2);
1682 qat_aereg_rel_data_read(sc, ae, ctx, AEREG_GPB_REL, 0, gpr_b0);
1683 qat_aereg_rel_data_read(sc, ae, ctx, AEREG_GPB_REL, 1, gpr_b1);
1687 return qat_ae_exec_ucode(sc, ae, ctx, ucode, ninst, 1, ninst * 5, NULL);
1691 qat_ae_restore_init_lm_gprs(struct qat_softc *sc, u_char ae, u_char ctx,
1694 qat_aereg_rel_data_write(sc, ae, ctx, AEREG_GPA_REL, 0, gpr_a0);
1695 qat_aereg_rel_data_write(sc, ae, ctx, AEREG_GPA_REL, 1, gpr_a1);
1696 qat_aereg_rel_data_write(sc, ae, ctx, AEREG_GPA_REL, 2, gpr_a2);
1697 qat_aereg_rel_data_write(sc, ae, ctx, AEREG_GPB_REL, 0, gpr_b0);
1698 qat_aereg_rel_data_write(sc, ae, ctx, AEREG_GPB_REL, 1, gpr_b1);
1726 qat_ae_batch_put_lm(struct qat_softc *sc, u_char ae,
1751 error = qat_ae_exec_ucode_init_lm(sc, ae, 0,
1755 qat_ae_restore_init_lm_gprs(sc, ae, 0,
1771 error = qat_ae_exec_ucode_init_lm(sc, ae, 0,
1776 qat_ae_restore_init_lm_gprs(sc, ae, 0,
1786 qat_ae_write_pc(struct qat_softc *sc, u_char ae, u_int ctx_mask, u_int upc)
1789 if (qat_ae_is_active(sc, ae))
1792 qat_ae_ctx_indr_write(sc, ae, ctx_mask, CTX_STS_INDIRECT,
2639 u_char ae;
2646 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
2649 if (!((ae_mode->sam_ae_mask >> ae) & 0x1))
2651 if (qat_ae_is_active(sc, ae)) {
2652 aprint_error_dev(sc->sc_dev, "AE %d is active\n", ae);
2656 FCU_CTRL_CMD_LOAD | __SHIFTIN(ae, FCU_CTRL_AE));
2663 (loaded & (1 << ae))) {
2721 "shared ae mode is not supported yet\n");
2755 "ae %p slice %d page %d assign region %d\n",
2765 qat_aefw_uof_init_ae(struct qat_softc *sc, u_char ae)
2768 struct qat_ae *qae = &(QAT_AE(sc, ae));
2777 qat_ae_write_ctx_mode(sc, ae,
2782 qat_ae_write_nn_mode(sc, ae, nn_mode);
2784 qat_ae_write_lm_mode(sc, ae, AEREG_LMEM0,
2786 qat_ae_write_lm_mode(sc, ae, AEREG_LMEM1,
2789 qat_ae_write_shared_cs_mode(sc, ae,
2791 qat_ae_set_reload_ustore(sc, ae, image->ui_reloadable_size,
2802 int ae, i, error;
2805 for (ae = 0, mask = sc->sc_ae_mask; mask; ae++, mask >>= 1) {
2811 qae = &(QAT_AE(sc, ae));
2815 (1 << ae)) == 0)
2828 error = qat_aefw_uof_init_ae(sc, ae);
2897 qat_aefw_start(struct qat_softc *sc, u_char ae, u_int ctx_mask)
2915 qat_ae_ctx_indr_write(sc, ae, (~ctx_mask) & AE_ALL_CTX,
2918 qat_ae_enable_ctx(sc, ae, ctx_mask);
2952 return 0; /* ae is fused out */
2998 qat_aefw_free_lm_init(struct qat_softc *sc, u_char ae)
3003 while ((qabi = SIMPLEQ_FIRST(&qafu->qafu_lm_init[ae])) != NULL) {
3004 SIMPLEQ_REMOVE_HEAD(&qafu->qafu_lm_init[ae], qabi_next);
3008 qafu->qafu_num_lm_init[ae] = 0;
3009 qafu->qafu_num_lm_init_inst[ae] = 0;
3020 u_char ae, nae;
3050 for (ae = 0; ae < sc->sc_ae_num; ae++) {
3051 KASSERT(ae < UOF_MAX_NUM_OF_AE);
3052 if ((ui->ui_ae_assigned & (1 << ae)) == 0)
3055 if (QAT_AE(sc, ae).qae_shareable_ustore && (ae & 1)) {
3056 qat_ae_get_shared_ustore_ae(ae, &nae);
3057 if (ui->ui_ae_assigned & (1 << ae))
3060 usz = QAT_AE(sc, ae).qae_effect_ustore_size;
3077 if (QAT_AE(sc, ae).qae_shareable_ustore) {
3081 ae,
3098 qat_aefw_init_reg(struct qat_softc *sc, u_char ae, u_char ctx_mask,
3118 error = qat_aereg_rel_data_write(sc, ae, ctx, regtype,
3130 error = qat_aereg_abs_data_write(sc, ae, regtype,
3142 qat_aefw_init_reg_sym_expr(struct qat_softc *sc, u_char ae,
3168 qat_aefw_init_reg(sc, ae, ctx_mask,
3181 qat_aefw_init_reg(sc, ae, 1 << uirs->uirs_ctx,
3205 u_char ae;
3221 "Could not init ae memory: %d\n", error);
3230 for (ae = 0; ae < MAX_AE; ae++) {
3231 error = qat_ae_batch_put_lm(sc, ae, &qafu->qafu_lm_init[ae],
3232 qafu->qafu_num_lm_init_inst[ae]);
3236 qat_aefw_free_lm_init(sc, ae);
3251 u_char ae;
3281 for (ae = 0; ae < sc->sc_ae_num; ae++) {
3282 struct qat_ae *qae = &(QAT_AE(sc, ae));
3291 qat_aefw_init_reg_sym_expr(sc, ae, qas->qas_image);
3332 qat_aefw_do_pagein(struct qat_softc *sc, u_char ae, struct qat_uof_page *qup)
3334 struct qat_ae *qae = &(QAT_AE(sc, ae));
3391 error = qat_ae_ucode_write(sc, ae, upaddr, cpylen,
3417 u_char ae, ctx_mask;
3437 for (ae = 0; ae < sc->sc_ae_num; ae++) {
3438 struct qat_ae *qae = &(QAT_AE(sc, ae));
3442 KASSERT(ae < UOF_MAX_NUM_OF_AE);
3444 if ((ui->ui_ae_assigned & (1 << ae)) == 0)
3465 error = qat_aefw_do_pagein(sc, ae, qap->qap_page);
3490 error = qat_ae_write_pc(sc, ae, ui->ui_ctx_assigned,