Lines Matching defs:xtlb — definition and use sites of the struct e500_xtlb pointer `xtlb` in NetBSD's e500 TLB code; the number leading each line below is its line number in the source file, and unmatched lines between them are omitted.
331 e500_free_tlb1_entry(struct e500_xtlb *xtlb, u_int slot, bool needs_sync)
335 KASSERT(&tlb1->tlb1_entries[slot] == xtlb);
337 KASSERT(xtlb->e_hwtlb.hwtlb_mas0 == (MAS0_TLBSEL_TLB1|__SHIFTIN(slot, MAS0_ESEL)));
338 xtlb->e_hwtlb.hwtlb_mas1 &= ~(MAS1_V|MAS1_IPROT);
339 hwtlb_write(xtlb->e_hwtlb, needs_sync);
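[331–339] e500_free_tlb1_entry() retires a TLB1 slot: it asserts that the cached MAS0 image still names that slot, clears the valid and pin bits (MAS1_V|MAS1_IPROT) in the soft copy, and writes the image back to hardware. A minimal sketch of that write-back under the Book E MAS programming model (SPR names and helpers assumed; this is not the verbatim hwtlb_write()):

    /* Illustrative only: load the MAS registers from the cached image,
     * then tlbwe commits the now-invalid entry to the slot in ESEL. */
    mtspr(SPR_MAS0, xtlb->e_hwtlb.hwtlb_mas0);  /* TLB1 | slot */
    mtspr(SPR_MAS1, xtlb->e_hwtlb.hwtlb_mas1);  /* V, IPROT cleared */
    mtspr(SPR_MAS2, xtlb->e_hwtlb.hwtlb_mas2);
    mtspr(SPR_MAS3, xtlb->e_hwtlb.hwtlb_mas3);
    __asm volatile("tlbwe");
    if (needs_sync)
            __asm volatile("isync" ::: "memory");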
683 struct e500_xtlb *xtlb = tlb1->tlb1_entries;
688 for (u_int i = 0; i < tlb1->tlb1_numentries; i++, xtlb++) {
689 psize_t mask = ~(xtlb->e_tlb.tlb_size - 1);
690 if ((xtlb->e_hwtlb.hwtlb_mas1 & MAS1_V)
691 && ((pa ^ xtlb->e_tlb.tlb_pte) & mask) == 0) {
694 return xtlb;
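[683–694] e500_tlb_lookup_xtlb_pa() scans the shadow array linearly; TLB1 is small, so no index structure is kept. The match test leans on TLB1 entries mapping naturally aligned power-of-two regions: ~(tlb_size - 1) keeps only the region-number bits, and the XOR-and-mask asks "same region?". Comparing pa directly against tlb_pte works because the PTE's low flag bits sit below the smallest TLB1 page size and vanish under the mask. The same idiom recurs in the va-based lookups at 710–716 and 732–737 below. A standalone restatement (names hypothetical):

    #include <stdbool.h>
    #include <stdint.h>

    /* True iff pa falls inside the naturally aligned power-of-two
     * region of `size` bytes whose base (possibly carrying low flag
     * bits) is `base`. */
    static bool
    region_covers(uint64_t pa, uint64_t base, uint64_t size)
    {
            uint64_t mask = ~(size - 1);
            return ((pa ^ base) & mask) == 0;
    }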
705 struct e500_xtlb *xtlb = tlb1->tlb1_entries;
710 for (u_int i = 0; i < tlb1->tlb1_numentries; i++, xtlb++) {
711 vsize_t mask = ~(xtlb->e_tlb.tlb_size - 1);
712 if ((xtlb->e_hwtlb.hwtlb_mas1 & MAS1_V)
713 && ((va ^ xtlb->e_tlb.tlb_va) & mask) == 0) {
716 return xtlb;
727 struct e500_xtlb *xtlb = tlb1->tlb1_entries;
732 for (u_int i = 0; i < tlb1->tlb1_numentries; i++, xtlb++) {
733 vsize_t mask = ~(xtlb->e_tlb.tlb_size - 1);
734 if ((xtlb->e_hwtlb.hwtlb_mas1 & MAS1_V)
735 && ((va ^ xtlb->e_tlb.tlb_va) & mask) == 0
737 return xtlb;
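[727–737] e500_tlb_lookup_xtlb2() adds a length check on top of the base match; the third conjunct lives on source line 736, which contains no "xtlb" and so does not appear in this listing. An assumed reconstruction, not verbatim: the first and last byte of [va, va + len) must land in the same region, which for a power-of-two mapping is one more XOR-and-mask:

    /* Assumed shape of the elided test on line 736: */
    if ((xtlb->e_hwtlb.hwtlb_mas1 & MAS1_V)
        && ((va ^ xtlb->e_tlb.tlb_va) & mask) == 0
        && (((va + len - 1) ^ va) & mask) == 0)
            return xtlb;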
747 struct e500_xtlb * const xtlb = e500_tlb_lookup_xtlb_pa(pa, NULL);
754 if (xtlb
756 || (xtlb->e_tlb.tlb_pte & PTE_WIG) == (PTE_I|PTE_G))) {
757 xtlb->e_refcnt++;
758 return (void *) (xtlb->e_tlb.tlb_va
759 + pa - (xtlb->e_tlb.tlb_pte & PTE_RPN_MASK));
768 struct e500_xtlb * const xtlb = e500_tlb_lookup_xtlb(va, NULL);
769 if (xtlb)
770 xtlb->e_refcnt--;
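[747–770] mapiodev/unmapiodev form a refcounted borrow/return pair: e500_tlb_mapiodev() never allocates, it reuses an entry that already covers pa, and only if that entry is device-safe, i.e. cache-inhibited and guarded (PTE_I|PTE_G within PTE_WIG), unless the caller accepts prefetchable access. On a hit, the device va is recovered by rebasing pa against the entry's physical base (758–759), and e_refcnt records the borrow that e500_tlb_unmapiodev() gives back (770). A hedged usage sketch (device address and exact signatures assumed, not taken from this listing):

    /* Hypothetical caller: borrow a mapping for one device page. */
    volatile uint32_t *regs = e500_tlb_mapiodev(0xfe000000, 4096, false);
    if (regs != NULL) {
            uint32_t id = regs[0];          /* uncached, guarded read */
            (void)id;
            e500_tlb_unmapiodev((vaddr_t)regs, 4096); /* undo e_refcnt++ */
    }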
778 struct e500_xtlb *xtlb;
787 if ((xtlb = e500_tlb_lookup_xtlb2(va, len)) != NULL) {
788 psize_t mask __diagused = ~(xtlb->e_tlb.tlb_size - 1);
789 KASSERT(len <= xtlb->e_tlb.tlb_size);
790 KASSERT((pte & mask) == (xtlb->e_tlb.tlb_pte & mask));
791 xtlb->e_refcnt++;
799 xtlb = &tlb1->tlb1_entries[slot];
800 xtlb->e_tlb.tlb_va = va;
801 xtlb->e_tlb.tlb_size = len;
802 xtlb->e_tlb.tlb_pte = pte;
803 xtlb->e_tlb.tlb_asid = KERNEL_PID;
805 xtlb->e_hwtlb = tlb_to_hwtlb(xtlb->e_tlb);
806 xtlb->e_hwtlb.hwtlb_mas0 |= __SHIFTIN(slot, MAS0_ESEL);
807 hwtlb_write(xtlb->e_hwtlb, true);
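[778–807] e500_tlb_ioreserve() first tries to piggyback on an existing entry (787–791: the request must fit inside it and agree on the physical base under the size mask, after which it just takes a reference). Only then does it claim a fresh slot: fill in the portable soft state (800–803), convert it to a MAS image (805), stamp the slot number into MAS0's ESEL field (806) so the invariant asserted at 337 holds, and commit with a synchronizing write (807). __SHIFTIN() is NetBSD's insert-into-bitfield helper; for a contiguous mask it is equivalent to this sketch:

    /* Multiply by the mask's lowest set bit to shift v into place;
     * equivalent to NetBSD's __SHIFTIN for a contiguous mask. */
    #define SHIFTIN(v, mask)    ((v) * ((mask) & -(mask)))

    uint32_t mas0 = MAS0_TLBSEL_TLB1 | SHIFTIN(slot, MAS0_ESEL);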
820 struct e500_xtlb * const xtlb = e500_tlb_lookup_xtlb(va, &slot);
822 if (xtlb == NULL)
825 if (xtlb->e_refcnt)
828 e500_free_tlb1_entry(xtlb, slot, true);
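[820–828] The release path refuses to free a borrowed entry: a lookup miss and a non-zero e_refcnt each bail out before the slot is retired with a synchronized write (828). The return values on the failure paths are not in this listing; an assumed shape, with hypothetical errnos:

    if (xtlb == NULL)
            return ENOENT;      /* hypothetical: nothing maps va */
    if (xtlb->e_refcnt)
            return EBUSY;       /* hypothetical: still referenced */
    e500_free_tlb1_entry(xtlb, slot, true);
    return 0;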
857 struct e500_xtlb * const xtlb =
859 xtlb->e_tlb.tlb_asid = KERNEL_PID;
860 xtlb->e_tlb.tlb_size = size;
861 xtlb->e_tlb.tlb_va = lastaddr;
862 xtlb->e_tlb.tlb_pte = lastaddr
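[857–862] This allocation (the right-hand side of 857 is on the unlisted line 858) builds a direct va == pa mapping of the region starting at lastaddr: tlb_va and the physical base stored in tlb_pte are both set from lastaddr, with the OR-ed PTE attribute flags continuing on unlisted lines after 862. Under such an identity mapping the rebasing arithmetic at 758–759 collapses to pa itself:

    /* With tlb_va == (tlb_pte & PTE_RPN_MASK), translation is identity:
     * va = tlb_va + (pa - phys_base) == pa.  Names illustrative. */
    uintptr_t va = tlb_va + (pa - phys_base);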
930 struct e500_xtlb * const xtlb = &tlb1->tlb1_entries[i];
932 xtlb->e_hwtlb = hwtlb_read(MAS0_TLBSEL_TLB1, i);
934 if ((xtlb->e_hwtlb.hwtlb_mas1 & MAS1_V) == 0) {
942 xtlb->e_tlb = hwtlb_to_tlb(xtlb->e_hwtlb);
945 i, xtlb->e_tlb.tlb_va, xtlb->e_tlb.tlb_size,
946 xtlb->e_tlb.tlb_asid, xtlb->e_tlb.tlb_pte);
948 if ((VM_MIN_KERNEL_ADDRESS <= xtlb->e_tlb.tlb_va
949 && xtlb->e_tlb.tlb_va < VM_MAX_KERNEL_ADDRESS)
950 || (xtlb->e_tlb.tlb_va < VM_MIN_KERNEL_ADDRESS
952 xtlb->e_tlb.tlb_va + xtlb->e_tlb.tlb_size)) {
956 e500_free_tlb1_entry(xtlb, i, false);
962 if ((xtlb->e_hwtlb.hwtlb_mas1 & MAS1_IPROT) == 0) {
963 xtlb->e_hwtlb.hwtlb_mas1 |= MAS1_IPROT;
964 hwtlb_write(xtlb->e_hwtlb, false);
969 if (xtlb->e_tlb.tlb_pte & PTE_I)
972 if (xtlb->e_tlb.tlb_va == 0
973 || xtlb->e_tlb.tlb_va + xtlb->e_tlb.tlb_size <= memsize) {
974 memmapped += xtlb->e_tlb.tlb_size;
980 if ((xtlb->e_hwtlb.hwtlb_mas2 & MAS2_M) == 0) {
981 xtlb->e_hwtlb.hwtlb_mas2 |= MAS2_M;
982 hwtlb_write(xtlb->e_hwtlb, true);
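[930–982] Boot-time inventory of TLB1 (only the lines naming xtlb are shown): read each hardware slot back into the shadow array (932), skip invalid slots (934), convert the MAS image to the portable form (942) and report it (945–946). Entries that intrude on kernel virtual address space are retired on the spot (948–956); every survivor is pinned with MAS1_IPROT so invalidations cannot evict it (962–964); cache-enabled entries that map RAM are totted up in memmapped (969–974); and MAS2_M (memory coherence required) is forced on for SMP safety (980–982). A sketch of the tlbre read-back at 932, under the Book E programming model (SPR names assumed; not the verbatim hwtlb_read()):

    /* Illustrative: select slot i of TLB1, tlbre latches that entry
     * into the MAS registers, then harvest the images. */
    mtspr(SPR_MAS0, MAS0_TLBSEL_TLB1 | __SHIFTIN(i, MAS0_ESEL));
    __asm volatile("tlbre");
    hw.hwtlb_mas1 = mfspr(SPR_MAS1);
    hw.hwtlb_mas2 = mfspr(SPR_MAS2);
    hw.hwtlb_mas3 = mfspr(SPR_MAS3);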
1053 struct e500_xtlb * const xtlb = &tlb1->tlb1_entries[i];
1054 struct e500_hwtlb * const hwtlb = &xtlb->e_hwtlb;
1057 e500_free_tlb1_entry(xtlb, i, false);
1126 struct e500_xtlb *xtlb = e500_tlb_lookup_xtlb(endkernel, &slot);
1128 KASSERT(xtlb == e500_tlb_lookup_xtlb2(0, endkernel));
1133 tmp_xtlb->e_tlb = xtlb->e_tlb;
1174 e500_free_tlb1_entry(xtlb, slot, true);
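[1053–1174] Late-boot cleanup: the loop at 1053–1057 walks the slots and frees the ones left unpinned above, and 1126–1174 replace the oversized boot mapping of the kernel. The entry covering endkernel is looked up (1126) and asserted to be the same one covering all of [0, endkernel) (1128); its soft state is cloned into a scratch entry (1133) and made live first, and only then is the original slot retired (1174). The clone-then-retire order is the load-bearing detail: reversed, the kernel would briefly run with its own text unmapped. Assumed shape of the switch, mirroring the commit sequence at 805–807 (scratch slot hypothetical):

    tmp_xtlb->e_tlb = xtlb->e_tlb;              /* clone soft state */
    tmp_xtlb->e_hwtlb = tlb_to_hwtlb(tmp_xtlb->e_tlb);
    tmp_xtlb->e_hwtlb.hwtlb_mas0 |= __SHIFTIN(tmp_slot, MAS0_ESEL);
    hwtlb_write(tmp_xtlb->e_hwtlb, true);       /* replacement live */
    e500_free_tlb1_entry(xtlb, slot, true);     /* old slot retired */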