Lines matching defs:ric (occurrences of the range-inference context, struct range_infer_ctx *ric):
123 io_range_extend(struct range_infer_ctx *ric, const pci_alloc_t *pal)
125 if (ric->ric_io_bottom > pal->pal_addr)
126 ric->ric_io_bottom = pal->pal_addr;
127 if (ric->ric_io_top < pal->pal_addr + pal->pal_size)
128 ric->ric_io_top = pal->pal_addr + pal->pal_size;
130 return pci_alloc_linkdup(&ric->ric_pals, pal);
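
Lines 123-130 show the core accumulator: each allocation lowers ric_io_bottom, raises ric_io_top, and is duplicated onto the ric_pals list. A minimal userland sketch of that min/max-plus-record pattern follows; the stand-in types carry only the fields visible in this listing (the real pci_alloc_t and range_infer_ctx have more), and this pci_alloc_linkdup is an illustrative malloc-based stand-in, not the file's implementation.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdlib.h>
    #include <sys/queue.h>

    /* Stand-ins carrying only the fields visible in the listing. */
    typedef struct pci_alloc {
        uint64_t pal_addr;
        uint64_t pal_size;
        TAILQ_ENTRY(pci_alloc) pal_link;
    } pci_alloc_t;

    TAILQ_HEAD(pci_alloc_list, pci_alloc);

    struct range_infer_ctx {
        uint64_t ric_io_bottom, ric_io_top;
        struct pci_alloc_list ric_pals;
    };

    /* Illustrative stand-in: duplicate the record onto the context's list. */
    static bool
    pci_alloc_linkdup(struct pci_alloc_list *pals, const pci_alloc_t *pal)
    {
        pci_alloc_t *dup;

        if ((dup = malloc(sizeof(*dup))) == NULL)
            return false;
        *dup = *pal;
        TAILQ_INSERT_TAIL(pals, dup, pal_link);
        return true;
    }

    /* Widen the inferred I/O range to cover this allocation, then record it. */
    static bool
    io_range_extend(struct range_infer_ctx *ric, const pci_alloc_t *pal)
    {
        if (ric->ric_io_bottom > pal->pal_addr)
            ric->ric_io_bottom = pal->pal_addr;
        if (ric->ric_io_top < pal->pal_addr + pal->pal_size)
            ric->ric_io_top = pal->pal_addr + pal->pal_size;
        return pci_alloc_linkdup(&ric->ric_pals, pal);
    }

The extend body matches the fragments at lines 125-130; only the list-duplication helper is a stand-in.
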
134 io_range_extend_by_bar(struct range_infer_ctx *ric, int bus, int dev, int fun,
149 pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
159 return (pal.pal_size == 0) || io_range_extend(ric, &pal);
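
Lines 134-159 build one pci_alloc_t per BAR and succeed cheaply when the BAR is absent. A hedged, kernel-context sketch of that probe follows, assuming the file's own pci_alloc_t/range_infer_ctx declarations and NetBSD's <dev/pci/pcireg.h> and <dev/pci/pcivar.h>; the ofs parameter and the save/probe/restore ordering are guesses at detail the listing omits, while the tag setup and the final return are taken from lines 149 and 159.

    static bool
    io_range_extend_by_bar(struct range_infer_ctx *ric, int bus, int dev, int fun,
        int ofs)
    {
        pci_alloc_t pal = { 0 };
        pcireg_t curbar, sizebar;

        pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);

        /* Standard BAR sizing: save, write all-ones, read back, restore. */
        curbar = pci_conf_read(ric->ric_pc, pal.pal_tag, ofs);
        pci_conf_write(ric->ric_pc, pal.pal_tag, ofs, 0xffffffff);
        sizebar = pci_conf_read(ric->ric_pc, pal.pal_tag, ofs);
        pci_conf_write(ric->ric_pc, pal.pal_tag, ofs, curbar);

        /* Only I/O BARs contribute to the I/O range. */
        if (PCI_MAPREG_TYPE(curbar) != PCI_MAPREG_TYPE_IO)
            return true;

        pal.pal_addr = PCI_MAPREG_IO_ADDR(curbar);
        pal.pal_size = PCI_MAPREG_IO_SIZE(sizebar);

        /* A zero-sized BAR is unimplemented, so there is nothing to record. */
        return (pal.pal_size == 0) || io_range_extend(ric, &pal);
    }
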
163 io_range_extend_by_vga_enable(struct range_infer_ctx *ric,
190 tpal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
204 return io_range_extend(ric, &pal[0]) && io_range_extend(ric, &pal[1]);
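
Lines 163-204 use a two-element pal[] with a shared tag, which is consistent with claiming the fixed legacy VGA I/O ranges for a function that has VGA decoding enabled. A hedged sketch; the enable test itself is not visible in the listing, so it is only marked by a comment, and the tpal bookkeeping seen at line 190 is dropped in this simplification.

    static bool
    io_range_extend_by_vga_enable(struct range_infer_ctx *ric,
        int bus, int dev, int fun)
    {
        pci_alloc_t pal[2] = {
              { .pal_addr = 0x3b0, .pal_size = 0x3bb - 0x3b0 + 1 }
            , { .pal_addr = 0x3c0, .pal_size = 0x3df - 0x3c0 + 1 }
        };

        /* ... test the command/bridge-control VGA-decoding bits here ... */

        pal[0].pal_tag = pal[1].pal_tag =
            pci_make_tag(ric->ric_pc, bus, dev, fun);
        return io_range_extend(ric, &pal[0]) && io_range_extend(ric, &pal[1]);
    }
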
208 io_range_extend_by_win(struct range_infer_ctx *ric,
226 pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
257 return io_range_extend(ric, &pal);
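
Lines 208-257 cover the PCI-PCI bridge I/O window. A hedged sketch of the decode such a function would perform, using raw config offsets from the bridge spec (0x1c for the I/O base/limit pair, 0x30 for the upper 16 bits when 32-bit I/O addressing is reported); only the tag setup and the final io_range_extend call come from the listing.

    static bool
    io_range_extend_by_win(struct range_infer_ctx *ric,
        int bus, int dev, int fun)
    {
        pci_alloc_t pal = { 0 };
        pcireg_t io, upper;
        uint32_t base, limit;

        pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
        io = pci_conf_read(ric->ric_pc, pal.pal_tag, 0x1c);
        base = (io & 0xf0) << 8;                  /* base, address bits 15:12 */
        limit = ((io >> 8) & 0xf0) << 8 | 0xfff;  /* limit, low 12 bits all-ones */
        if ((io & 0x0f) == 0x01) {                /* 32-bit I/O addressing */
            upper = pci_conf_read(ric->ric_pc, pal.pal_tag, 0x30);
            base |= (upper & 0xffff) << 16;
            limit |= (upper & 0xffff0000);
        }
        if (limit < base)                         /* window disabled */
            return true;
        pal.pal_addr = base;
        pal.pal_size = limit - base + 1;
        return io_range_extend(ric, &pal);
    }
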
261 io_range_extend_by_cbwin(struct range_infer_ctx *ric,
279 pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
297 return io_range_extend(ric, &pal);
303 struct range_infer_ctx *ric = ctx;
318 ok = ok && io_range_extend_by_win(ric, bus, dev, fun,
323 ok = ok && io_range_extend_by_vga_enable(ric, bus, dev, fun,
331 ok = ok && io_range_extend_by_cbwin(ric, bus, dev, fun,
335 ok = ok && io_range_extend_by_cbwin(ric, bus, dev, fun,
361 ok = ok && io_range_extend_by_bar(ric, bus, dev, fun,
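
Lines 303-361 are the per-function callback handed to pci_device_foreach_min. A hedged sketch of its skeleton: decompose the tag, then pick the extenders that match the header type. The real io_range_infer passes register values (command/bridge-control bits, window registers) down to the extenders, so the parameter lists here are simplified; the CardBus window selector is purely illustrative.

    static void
    io_range_infer(pci_chipset_tag_t pc, pcitag_t tag, void *ctx)
    {
        struct range_infer_ctx *ric = ctx;
        pcireg_t bhlc;
        int bus, dev, fun, reg;
        bool ok = true;

        pci_decompose_tag(pc, tag, &bus, &dev, &fun);
        bhlc = pci_conf_read(pc, tag, PCI_BHLC_REG);

        switch (PCI_HDRTYPE_TYPE(bhlc)) {
        case PCI_HDRTYPE_PPB:       /* PCI-PCI bridge: one I/O window */
            ok = ok && io_range_extend_by_win(ric, bus, dev, fun);
            ok = ok && io_range_extend_by_vga_enable(ric, bus, dev, fun);
            break;
        case PCI_HDRTYPE_PCB:       /* CardBus bridge: two I/O windows */
            ok = ok && io_range_extend_by_cbwin(ric, bus, dev, fun, 0);
            ok = ok && io_range_extend_by_cbwin(ric, bus, dev, fun, 1);
            break;
        default:                    /* ordinary device: walk the BARs */
            for (reg = PCI_MAPREG_START; reg < PCI_MAPREG_END; reg += 4)
                ok = ok && io_range_extend_by_bar(ric, bus, dev, fun, reg);
            break;
        }
        if (!ok)
            aprint_verbose("%s: %d/%d/%d: failed to extend I/O range\n",
                __func__, bus, dev, fun);
    }
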
371 mmio_range_extend(struct range_infer_ctx *ric, const pci_alloc_t *pal)
373 if (ric->ric_mmio_bottom > pal->pal_addr)
374 ric->ric_mmio_bottom = pal->pal_addr;
375 if (ric->ric_mmio_top < pal->pal_addr + pal->pal_size)
376 ric->ric_mmio_top = pal->pal_addr + pal->pal_size;
378 return pci_alloc_linkdup(&ric->ric_pals, pal);
382 mmio_range_extend_by_bar(struct range_infer_ctx *ric, int bus, int dev,
399 pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
428 return (pal.pal_size == 0) || mmio_range_extend(ric, &pal);
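
Lines 382-428 mirror the I/O BAR probe for memory BARs; the wider span suggests the extra case of 64-bit memory BARs, whose address and sizing mask occupy two consecutive registers. A small hypothetical helper (not in the file) showing how the two halves would combine and how the size falls out of the probe mask; the curbar and sizebar arguments are the saved and all-ones-probed values of the registers at ofs and ofs + 4.

    static void
    mem_bar64_combine(pcireg_t curbar_lo, pcireg_t sizebar_lo,
        pcireg_t curbar_hi, pcireg_t sizebar_hi,
        uint64_t *addrp, uint64_t *sizep)
    {
        uint64_t addr, mask;

        addr = PCI_MAPREG_MEM_ADDR(curbar_lo) | ((uint64_t)curbar_hi << 32);
        mask = PCI_MAPREG_MEM_ADDR(sizebar_lo) | ((uint64_t)sizebar_hi << 32);

        *addrp = addr;
        /* The size is the lowest set bit of the probe mask. */
        *sizep = mask & (~mask + 1);
    }

With the combined mask in hand, the zero-size early return at line 428 works the same way as in the I/O case.
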
432 mmio_range_extend_by_vga_enable(struct range_infer_ctx *ric,
459 tpal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
468 return mmio_range_extend(ric, &pal);
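
Lines 432-468 are the MMIO counterpart of the VGA-enable case at lines 163-204, with a single pal, which points at the legacy VGA frame-buffer window. A hedged sketch with the enable check again elided:

    static bool
    mmio_range_extend_by_vga_enable(struct range_infer_ctx *ric,
        int bus, int dev, int fun)
    {
        pci_alloc_t pal = {
            .pal_addr = 0xa0000,
            .pal_size = 0xbffff - 0xa0000 + 1,  /* 128 KiB legacy VGA window */
        };

        /* ... the VGA-decoding check is not visible in the listing ... */

        pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
        return mmio_range_extend(ric, &pal);
    }
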
472 mmio_range_extend_by_win(struct range_infer_ctx *ric,
489 pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
508 return mmio_range_extend(ric, &pal);
512 mmio_range_extend_by_prememwin(struct range_infer_ctx *ric,
531 pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
561 return mmio_range_extend(ric, &pal);
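
Lines 512-561 decode the bridge's prefetchable memory window, the one window type with optional 64-bit upper halves (offsets 0x28/0x2c); the plain memory window handled at lines 472-508 is the same decode without them. A hedged sketch using raw bridge-spec offsets; only the tag setup and the final mmio_range_extend call come from the listing.

    static bool
    mmio_range_extend_by_prememwin(struct range_infer_ctx *ric,
        int bus, int dev, int fun)
    {
        pci_alloc_t pal = { 0 };
        pcireg_t win;
        uint64_t base, limit;

        pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
        win = pci_conf_read(ric->ric_pc, pal.pal_tag, 0x24);
        base = (win & 0xfff0) << 16;                    /* address bits 31:20 */
        limit = ((win >> 16) & 0xfff0) << 16 | 0xfffff; /* low 20 bits all-ones */
        if ((win & 0x000f) == 0x0001) {                 /* 64-bit window */
            base |= (uint64_t)pci_conf_read(ric->ric_pc, pal.pal_tag,
                0x28) << 32;
            limit |= (uint64_t)pci_conf_read(ric->ric_pc, pal.pal_tag,
                0x2c) << 32;
        }
        if (limit < base)                               /* window disabled */
            return true;
        pal.pal_addr = base;
        pal.pal_size = limit - base + 1;
        return mmio_range_extend(ric, &pal);
    }
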
565 mmio_range_extend_by_cbwin(struct range_infer_ctx *ric,
586 pal.pal_tag = pci_make_tag(ric->ric_pc, bus, dev, fun);
604 return mmio_range_extend(ric, &pal);
610 struct range_infer_ctx *ric = ctx;
625 ok = ok && mmio_range_extend_by_win(ric, bus, dev, fun,
639 mmio_range_extend_by_prememwin(ric, bus, dev, fun,
643 mmio_range_extend_by_vga_enable(ric, bus, dev, fun,
652 ok = ok && mmio_range_extend_by_cbwin(ric, bus, dev, fun,
657 ok = ok && mmio_range_extend_by_cbwin(ric, bus, dev, fun,
684 ok = ok && mmio_range_extend_by_bar(ric, bus, dev, fun,
866 struct range_infer_ctx ric = {
871 , .ric_pals = TAILQ_HEAD_INITIALIZER(ric.ric_pals)
875 ric.ric_pc = pc;
876 pci_device_foreach_min(pc, minbus, maxbus, mmio_range_infer, &ric);
877 pci_device_foreach_min(pc, minbus, maxbus, io_range_infer, &ric);
879 *membasep = ric.ric_mmio_bottom;
881 *memsizep = ric.ric_mmio_top - ric.ric_mmio_bottom;
883 *iobasep = ric.ric_io_bottom;
885 *iosizep = ric.ric_io_top - ric.ric_io_bottom;
888 (uintmax_t)(ric.ric_mmio_top - ric.ric_mmio_bottom),
889 (uintmax_t)ric.ric_mmio_bottom);
892 (uintmax_t)(ric.ric_io_top - ric.ric_io_bottom),
893 (uintmax_t)ric.ric_io_bottom);
894 TAILQ_FOREACH(pal, &ric.ric_pals, pal_link)
907 } else if (!pci_range_record(pc, memrsvns, &ric.ric_pals,
912 "start", ric.ric_mmio_bottom) ||
914 ric.ric_mmio_top - ric.ric_mmio_bottom)) {
927 } else if (!pci_range_record(pc, iorsvns, &ric.ric_pals,
932 "start", ric.ric_io_bottom) ||
934 ric.ric_io_top - ric.ric_io_bottom)) {
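
Lines 866-934 are the driver. A hedged sketch of the overall flow under a hypothetical name: the context starts with impossible bounds (the real initializers at lines 866-871 are not visible here), both infer callbacks tighten them across every device on buses minbus..maxbus, and the results are reported as base/size pairs. The real code has additional logic between the stores (the listing skips lines there), which this sketch omits.

    static void
    infer_pci_ranges(pci_chipset_tag_t pc, int minbus, int maxbus,
        bus_addr_t *membasep, bus_size_t *memsizep,
        bus_addr_t *iobasep, bus_size_t *iosizep)
    {
        struct range_infer_ctx ric = {
              .ric_io_bottom = ~(bus_addr_t)0   /* assumed initial bounds */
            , .ric_io_top = 0
            , .ric_mmio_bottom = ~(bus_addr_t)0
            , .ric_mmio_top = 0
            , .ric_pals = TAILQ_HEAD_INITIALIZER(ric.ric_pals)
        };

        ric.ric_pc = pc;
        pci_device_foreach_min(pc, minbus, maxbus, mmio_range_infer, &ric);
        pci_device_foreach_min(pc, minbus, maxbus, io_range_infer, &ric);

        *membasep = ric.ric_mmio_bottom;
        *memsizep = ric.ric_mmio_top - ric.ric_mmio_bottom;
        *iobasep = ric.ric_io_bottom;
        *iosizep = ric.ric_io_top - ric.ric_io_bottom;
    }

The fragments at lines 888-934 show the rest of the real function: the inferred MMIO and I/O ranges are printed, and each allocation collected on ric_pals appears to be handed to pci_range_record together with a "start" property and the computed length for the memory and I/O reservation lists.
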