1 /* $NetBSD: t_uvm_physseg.c,v 1.1 2016/12/19 12:21:29 cherry Exp $ */
2
3 /*-
4 * Copyright (c) 2015, 2016 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Santhosh N. Raju <santhosh.raju (at) gmail.com> and
9 * by Cherry G. Mathew
10 *
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions
13 * are met:
14 * 1. Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * 2. Redistributions in binary form must reproduce the above copyright
17 * notice, this list of conditions and the following disclaimer in the
18 * documentation and/or other materials provided with the distribution.
19 *
20 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
21 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
22 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
24 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
25 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
27 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
28 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
29 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30 * POSSIBILITY OF SUCH DAMAGE.
31 */
32
33 #include <sys/cdefs.h>
34 __RCSID("$NetBSD: t_uvm_physseg.c,v 1.1 2016/12/19 12:21:29 cherry Exp $");
35
36 /* Testing API - assumes userland */
37 /* Provide Kernel API equivalents */
38 #include <assert.h>
39 #include <errno.h>
40 #include <stdbool.h>
41 #include <string.h> /* memset(3) et al. */
42 #include <stdio.h> /* printf(3) */
43 #include <stdlib.h> /* malloc(3) */
44 #include <stdarg.h>
45 #include <stddef.h>
46
47 #define PRIxPADDR "lx"
48 #define PRIxPSIZE "lx"
49 #define PRIuPSIZE "lu"
50 #define PRIxVADDR "lx"
51 #define PRIxVSIZE "lx"
52 #define PRIuVSIZE "lu"
53
54 #define UVM_HOTPLUG /* Enable hotplug with rbtree. */
55 #define PMAP_STEAL_MEMORY
56 #define DEBUG /* Enable debug functionality. */
57
58 typedef unsigned long vaddr_t;
59 typedef unsigned long paddr_t;
60 typedef unsigned long psize_t;
61 typedef unsigned long vsize_t;
62
63 #include <uvm/uvm_page.h>
64
65 /*
66 * If this line is commented out, tests related to uvm_physseg_get_pmseg()
67 * won't run.
68 *
69 * Have a look at machine/uvm_physseg.h for more details.
70 */
71 #define __HAVE_PMAP_PHYSSEG
72
73 #include <uvm/uvm_physseg.h>
74
75 /*
76 * This is a dummy struct used for testing purposes
77 *
78 * In reality this struct would exist in the MD part of the code, residing in
79 * machine/vmparam.h
80 */
81
82 #ifdef __HAVE_PMAP_PHYSSEG
83 struct pmap_physseg {
84 bool dummy_variable; /* Dummy variable used for testing */
85 };
86 #endif
87
88 #ifndef DIAGNOSTIC
89 #define KASSERTMSG(e, msg, ...) /* NOTHING */
90 #define KASSERT(e) /* NOTHING */
91 #else
92 #define KASSERT(a) assert(a)
93 #define KASSERTMSG(exp, ...) do { printf(__VA_ARGS__); assert((exp)); } while (/*CONSTCOND*/0)
94 #endif
95
96 #define VM_PHYSSEG_STRAT VM_PSTRAT_BSEARCH
97
98 #define VM_NFREELIST 4
99 #define VM_FREELIST_DEFAULT 0
100 #define VM_FREELIST_FIRST16 3
101 #define VM_FREELIST_FIRST1G 2
102 #define VM_FREELIST_FIRST4G 1
103
104 /*
105 * Used in tests when Array implementation is tested
106 */
107 #if !defined(VM_PHYSSEG_MAX)
108 #define VM_PHYSSEG_MAX 1
109 #endif
110
111 #define PAGE_SHIFT 12
112 #define PAGE_SIZE (1 << PAGE_SHIFT)
113 #define PAGE_MASK (PAGE_SIZE - 1)
114 #define atop(x) (((paddr_t)(x)) >> PAGE_SHIFT)
115 #define ptoa(x) (((paddr_t)(x)) << PAGE_SHIFT)
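
/*
 * Editor's illustrative sketch (not part of the original test; assumes a
 * C11 compiler for _Static_assert): with PAGE_SHIFT == 12, atop() converts
 * a byte address into a page frame number and ptoa() converts back, so
 * 1MB corresponds to 256 page frames.
 */
_Static_assert(atop(1024 * 1024) == 256, "1MB maps to 256 4K page frames");
_Static_assert(ptoa(atop(8192)) == 8192, "ptoa(atop(x)) is identity for page-aligned x");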
116
117 #define mutex_enter(l)
118 #define mutex_exit(l)
119
120 psize_t physmem;
121
122 struct uvmexp uvmexp; /* decl */
123
124 /*
125 * uvm structure borrowed from uvm.h
126 *
127 * Remember this is a dummy structure used within the ATF Tests and
128 * uses only necessary fields from the original uvm struct.
129 * See uvm/uvm.h for the full struct.
130 */
131
132 struct uvm {
133 /* vm_page related parameters */
134
135 bool page_init_done; /* TRUE if uvm_page_init() finished */
136 } uvm;
137
138 #include <sys/kmem.h>
139
140 void *
141 kmem_alloc(size_t size, km_flag_t flags)
142 {
143 return malloc(size);
144 }
145
146 void *
147 kmem_zalloc(size_t size, km_flag_t flags)
148 {
149 void *ptr;
150 ptr = malloc(size);
151
152 memset(ptr, 0, size);
153
154 return ptr;
155 }
156
157 void
158 kmem_free(void *mem, size_t size)
159 {
160 free(mem);
161 }
162
163 static void
164 panic(const char *fmt, ...)
165 {
166 va_list ap;
167
168 va_start(ap, fmt);
169 vprintf(fmt, ap);
170 printf("\n");
171 va_end(ap);
172 KASSERT(false);
173
174 /*NOTREACHED*/
175 }
176
177 static void
178 uvm_pagefree(struct vm_page *pg)
179 {
180 return;
181 }
182
183 #if defined(UVM_HOTPLUG)
184 static void
185 uvmpdpol_reinit(void)
186 {
187 return;
188 }
189 #endif /* UVM_HOTPLUG */
190
191 /* end - Provide Kernel API equivalents */
192
193
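/*
 * Whitebox testing note: the implementation under test is compiled
 * directly into this program via the #include below, so that its
 * internal helpers and private state (e.g. uvm_physseg_graph, read by
 * uvm_physseg_get_entries() above) are visible to the test cases.
 */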
194 #include "uvm/uvm_physseg.c"
195
196 #include <atf-c.h>
197
198 #define SIXTYFOUR_KILO (64 * 1024)
199 #define ONETWENTYEIGHT_KILO (128 * 1024)
200 #define TWOFIFTYSIX_KILO (256 * 1024)
201 #define FIVEONETWO_KILO (512 * 1024)
202 #define ONE_MEGABYTE (1024 * 1024)
203 #define TWO_MEGABYTE (2 * 1024 * 1024)
204
205 /* Sample Page Frame Numbers */
206 #define VALID_START_PFN_1 atop(0)
207 #define VALID_END_PFN_1 atop(ONE_MEGABYTE)
208 #define VALID_AVAIL_START_PFN_1 atop(0)
209 #define VALID_AVAIL_END_PFN_1 atop(ONE_MEGABYTE)
210
211 #define VALID_START_PFN_2 atop(ONE_MEGABYTE + 1)
212 #define VALID_END_PFN_2 atop(ONE_MEGABYTE * 2)
213 #define VALID_AVAIL_START_PFN_2 atop(ONE_MEGABYTE + 1)
214 #define VALID_AVAIL_END_PFN_2 atop(ONE_MEGABYTE * 2)
215
216 #define VALID_START_PFN_3 atop((ONE_MEGABYTE * 2) + 1)
217 #define VALID_END_PFN_3 atop(ONE_MEGABYTE * 3)
218 #define VALID_AVAIL_START_PFN_3 atop((ONE_MEGABYTE * 2) + 1)
219 #define VALID_AVAIL_END_PFN_3 atop(ONE_MEGABYTE * 3)
220
221 #define VALID_START_PFN_4 atop((ONE_MEGABYTE * 3) + 1)
222 #define VALID_END_PFN_4 atop(ONE_MEGABYTE * 4)
223 #define VALID_AVAIL_START_PFN_4 atop((ONE_MEGABYTE * 3) + 1)
224 #define VALID_AVAIL_END_PFN_4 atop(ONE_MEGABYTE * 4)
225
226 /*
227 * Total number of pages (of 4K size each) should be 256 for 1MB of memory.
228 */
229 #define PAGE_COUNT_1M 256
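
/*
 * Editor's illustrative sketch (not part of the original test; assumes a
 * C11 compiler for _Static_assert): the first sample segment above spans
 * exactly PAGE_COUNT_1M page frames, which is what several tests below
 * compare against uvmexp.npages.
 */
_Static_assert(VALID_END_PFN_1 - VALID_START_PFN_1 == PAGE_COUNT_1M,
    "the 1MB sample segment must contain PAGE_COUNT_1M pages");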
230
231 /*
232 * A debug function to print the contents of upm.
233 */
234 static inline void
235 uvm_physseg_dump_seg(uvm_physseg_t upm)
236 {
237 #if defined(DEBUG)
238 printf("%s: seg->start == %ld\n", __func__,
239 uvm_physseg_get_start(upm));
240 printf("%s: seg->end == %ld\n", __func__,
241 uvm_physseg_get_end(upm));
242 printf("%s: seg->avail_start == %ld\n", __func__,
243 uvm_physseg_get_avail_start(upm));
244 printf("%s: seg->avail_end == %ld\n", __func__,
245 uvm_physseg_get_avail_end(upm));
246
247 printf("====\n\n");
248 #else
249 return;
250 #endif /* DEBUG */
251 }
252
253 /*
254 * Private accessor that gets the value of uvm_physseg_graph.nentries
255 */
256 static int
257 uvm_physseg_get_entries(void)
258 {
259 #if defined(UVM_HOTPLUG)
260 return uvm_physseg_graph.nentries;
261 #else
262 return vm_nphysmem;
263 #endif /* UVM_HOTPLUG */
264 }
265
266 #if !defined(UVM_HOTPLUG)
267 static void *
268 uvm_physseg_alloc(size_t sz)
269 {
270 return &vm_physmem[vm_nphysseg++];
271 }
272 #endif
273
274 /*
275 * Test Fixture SetUp().
276 */
277 static void
278 setup(void)
279 {
280 /* Prerequisites for running certain calls in uvm_physseg */
281 uvmexp.pagesize = PAGE_SIZE;
282 uvmexp.npages = 0;
283 uvm.page_init_done = false;
284 uvm_physseg_init();
285 }
286
287
288 /* <---- Tests for Internal functions ----> */
289 #if defined(UVM_HOTPLUG)
290 ATF_TC(uvm_physseg_alloc_atboot_mismatch);
291 ATF_TC_HEAD(uvm_physseg_alloc_atboot_mismatch, tc)
292 {
293 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_alloc() sanity: "
294 "size mismatch alloc() test.");
295 }
296
297 ATF_TC_BODY(uvm_physseg_alloc_atboot_mismatch, tc)
298 {
299 uvm.page_init_done = false;
300
301 atf_tc_expect_signal(SIGABRT, "size mismatch alloc()");
302
303 uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1);
304 }
305
306 ATF_TC(uvm_physseg_alloc_atboot_overrun);
307 ATF_TC_HEAD(uvm_physseg_alloc_atboot_overrun, tc)
308 {
309 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_alloc() sanity: "
310 "array overrun alloc() test.");
311 }
312
313 ATF_TC_BODY(uvm_physseg_alloc_atboot_overrun, tc)
314 {
315 uvm.page_init_done = false;
316
317 atf_tc_expect_signal(SIGABRT, "array overrun alloc()");
318
319 uvm_physseg_alloc((VM_PHYSSEG_MAX + 1) * sizeof(struct uvm_physseg));
320
321 }
322
323 ATF_TC(uvm_physseg_alloc_sanity);
324 ATF_TC_HEAD(uvm_physseg_alloc_sanity, tc)
325 {
326 atf_tc_set_md_var(tc, "descr", "further uvm_physseg_alloc() sanity checks");
327 }
328
329 ATF_TC_BODY(uvm_physseg_alloc_sanity, tc)
330 {
331
332 /* At boot time */
333 uvm.page_init_done = false;
334
335 /* Correct alloc */
336 ATF_REQUIRE(uvm_physseg_alloc(VM_PHYSSEG_MAX * sizeof(struct uvm_physseg)));
337
338 /* Retry static alloc()s as dynamic - we expect them to pass */
339 uvm.page_init_done = true;
340 ATF_REQUIRE(uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1));
341 ATF_REQUIRE(uvm_physseg_alloc(2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg)));
342 }
343
344 ATF_TC(uvm_physseg_free_atboot_mismatch);
345 ATF_TC_HEAD(uvm_physseg_free_atboot_mismatch, tc)
346 {
347 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_free() sanity: "
348 "size mismatch free() test.");
349 }
350
351 ATF_TC_BODY(uvm_physseg_free_atboot_mismatch, tc)
352 {
353 uvm.page_init_done = false;
354
355 atf_tc_expect_signal(SIGABRT, "size mismatch free()");
356
357 uvm_physseg_free(&uvm_physseg[0], sizeof(struct uvm_physseg) - 1);
358 }
359
360 ATF_TC(uvm_physseg_free_sanity);
361 ATF_TC_HEAD(uvm_physseg_free_sanity, tc)
362 {
363 atf_tc_set_md_var(tc, "descr", "further uvm_physseg_free() sanity checks");
364 }
365
366 ATF_TC_BODY(uvm_physseg_free_sanity, tc)
367 {
368
369 /* At boot time */
370 uvm.page_init_done = false;
371
372 struct uvm_physseg *seg;
373
374 #if VM_PHYSSEG_MAX > 1
375 /*
376 * Note: free()ing the entire array is considered to be an
377 * error. Thus VM_PHYSSEG_MAX - 1.
378 */
379
380 seg = uvm_physseg_alloc((VM_PHYSSEG_MAX - 1) * sizeof(*seg));
381 uvm_physseg_free(seg, (VM_PHYSSEG_MAX - 1) * sizeof(struct uvm_physseg));
382 #endif
383
384 /* Retry static alloc()s as dynamic - we expect them to pass */
385 uvm.page_init_done = true;
386
387 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1);
388 uvm_physseg_free(seg, sizeof(struct uvm_physseg) - 1);
389
390 seg = uvm_physseg_alloc(2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg));
391
392 uvm_physseg_free(seg, 2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg));
393 }
394
395 #if VM_PHYSSEG_MAX > 1
396 ATF_TC(uvm_physseg_atboot_free_leak);
397 ATF_TC_HEAD(uvm_physseg_atboot_free_leak, tc)
398 {
399 atf_tc_set_md_var(tc, "descr",
400 "does free() leak at boot?\n"
401 "(This test needs VM_PHYSSEG_MAX > 1)");
402 }
403
404 ATF_TC_BODY(uvm_physseg_atboot_free_leak, tc)
405 {
406
407 /* At boot time */
408 uvm.page_init_done = false;
409
410 /* alloc to array size */
411 struct uvm_physseg *seg;
412 seg = uvm_physseg_alloc(VM_PHYSSEG_MAX * sizeof(*seg));
413
414 uvm_physseg_free(seg, sizeof(*seg));
415
416 atf_tc_expect_signal(SIGABRT, "array overrun on alloc() after leak");
417
418 ATF_REQUIRE(uvm_physseg_alloc(sizeof(struct uvm_physseg)));
419 }
420 #endif /* VM_PHYSSEG_MAX */
421 #endif /* UVM_HOTPLUG */
422
423 /*
424 * Note: This function replicates verbatim what happens in
425 * uvm_page.c:uvm_page_init().
426 *
427 * Please track any changes that happen there.
428 */
429 static void
430 uvm_page_init_fake(struct vm_page *pagearray, psize_t pagecount)
431 {
432 uvm_physseg_t bank;
433 size_t n;
434
435 for (bank = uvm_physseg_get_first(),
436 uvm_physseg_seg_chomp_slab(bank, pagearray, pagecount);
437 uvm_physseg_valid(bank);
438 bank = uvm_physseg_get_next(bank)) {
439
440 n = uvm_physseg_get_end(bank) - uvm_physseg_get_start(bank);
441 uvm_physseg_seg_alloc_from_slab(bank, n);
442 uvm_physseg_init_seg(bank, pagearray);
443
444 /* set up page array pointers */
445 pagearray += n;
446 pagecount -= n;
447 }
448
449 uvm.page_init_done = true;
450 }
451
452 ATF_TC(uvm_physseg_plug);
453 ATF_TC_HEAD(uvm_physseg_plug, tc)
454 {
455 atf_tc_set_md_var(tc, "descr",
456 "Test plug functionality.");
457 }
458 /* Note: We only do the second boot time plug if VM_PHYSSEG_MAX > 2 */
459 ATF_TC_BODY(uvm_physseg_plug, tc)
460 {
461 int nentries = 0; /* Count of entries via plug done so far */
462 uvm_physseg_t upm1;
463 #if VM_PHYSSEG_MAX > 2
464 uvm_physseg_t upm2;
465 #endif
466
467 #if VM_PHYSSEG_MAX > 1
468 uvm_physseg_t upm3;
469 #endif
470 uvm_physseg_t upm4;
471 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
472 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
473 psize_t npages3 = (VALID_END_PFN_3 - VALID_START_PFN_3);
474 psize_t npages4 = (VALID_END_PFN_4 - VALID_START_PFN_4);
475 struct vm_page *pgs, *slab = malloc(sizeof(struct vm_page) * (npages1
476 #if VM_PHYSSEG_MAX > 2
477 + npages2
478 #endif
479 + npages3));
480
481 /* Fake early boot */
482
483 setup();
484
485 /* Vanilla plug x 2 */
486 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_1, npages1, &upm1), true);
487 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
488 ATF_REQUIRE_EQ(0, uvmexp.npages);
489
490 #if VM_PHYSSEG_MAX > 2
491 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_2, npages2, &upm2), true);
492 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
493 ATF_REQUIRE_EQ(0, uvmexp.npages);
494 #endif
495 /* Post boot: Fake all segments and pages accounted for. */
496 uvm_page_init_fake(slab, npages1 + npages2 + npages3);
497
498 ATF_CHECK_EQ(npages1
499 #if VM_PHYSSEG_MAX > 2
500 + npages2
501 #endif
502 , uvmexp.npages);
503 #if VM_PHYSSEG_MAX > 1
504 /* Scavenge plug - goes into the same slab */
505 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_3, npages3, &upm3), true);
506 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
507 ATF_REQUIRE_EQ(npages1
508 #if VM_PHYSSEG_MAX > 2
509 + npages2
510 #endif
511 + npages3, uvmexp.npages);
512
513 /* Scavenge plug should fit right in the slab */
514 pgs = uvm_physseg_get_pg(upm3, 0);
515 ATF_REQUIRE(pgs > slab && pgs < (slab + npages1 + npages2 + npages3));
516 #endif
517 /* Hot plug - goes into a brand new slab */
518 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_4, npages4, &upm4), true);
519 /* The hot plug slab should have nothing to do with the original slab */
520 pgs = uvm_physseg_get_pg(upm4, 0);
521 ATF_REQUIRE(pgs < slab || pgs > (slab + npages1
522 #if VM_PHYSSEG_MAX > 2
523 + npages2
524 #endif
525 + npages3));
526
527 }
528 ATF_TC(uvm_physseg_unplug);
529 ATF_TC_HEAD(uvm_physseg_unplug, tc)
530 {
531 atf_tc_set_md_var(tc, "descr",
532 "Test unplug functionality.");
533 }
534 ATF_TC_BODY(uvm_physseg_unplug, tc)
535 {
536 paddr_t pa = 0;
537
538 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
539 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
540 psize_t npages3 = (VALID_END_PFN_3 - VALID_START_PFN_3);
541
542 struct vm_page *slab = malloc(sizeof(struct vm_page) * (npages1 + npages2 + npages3));
543
544 uvm_physseg_t upm;
545
546 /* Boot time */
547 setup();
548
549 /* We start with zero segments */
550 ATF_REQUIRE_EQ(true, uvm_physseg_plug(atop(0), atop(ONE_MEGABYTE), NULL));
551 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
552 /* Do we have an arbitrary offset in there ? */
553 uvm_physseg_find(atop(TWOFIFTYSIX_KILO), &pa);
554 ATF_REQUIRE_EQ(pa, atop(TWOFIFTYSIX_KILO));
555 ATF_REQUIRE_EQ(0, uvmexp.npages); /* Boot time sanity */
556
557 #if VM_PHYSSEG_MAX == 1
558 /*
559 * This is the curious case at boot time, of having one
560 * extent(9) static entry per segment, which means that a
561 * fragmenting unplug will fail.
562 */
563 atf_tc_expect_signal(SIGABRT, "fragmenting unplug for single segment");
564
565 /*
566 * In order to test the fragmenting cases, please set
567 * VM_PHYSSEG_MAX > 1
568 */
569 #endif
570 /* Now let's unplug from the middle */
571 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(TWOFIFTYSIX_KILO), atop(FIVEONETWO_KILO)));
572 /* verify that a gap exists at TWOFIFTYSIX_KILO */
573 pa = 0; /* reset */
574 uvm_physseg_find(atop(TWOFIFTYSIX_KILO), &pa);
575 ATF_REQUIRE_EQ(pa, 0);
576
577 /* Post boot: Fake all segments and pages accounted for. */
578 uvm_page_init_fake(slab, npages1 + npages2 + npages3);
579 /* Account for the unplug */
580 ATF_CHECK_EQ(atop(FIVEONETWO_KILO), uvmexp.npages);
581
582 /* Original entry should fragment into two */
583 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
584
585 upm = uvm_physseg_find(atop(TWOFIFTYSIX_KILO + FIVEONETWO_KILO), NULL);
586
587 ATF_REQUIRE(uvm_physseg_valid(upm));
588
589 /* Now unplug the tail fragment - should swallow the complete entry */
590 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(TWOFIFTYSIX_KILO + FIVEONETWO_KILO), atop(TWOFIFTYSIX_KILO)));
591
592 /* The "swallow" above should have invalidated the handle */
593 ATF_REQUIRE_EQ(false, uvm_physseg_valid(upm));
594
595 /* Only the first one is left now */
596 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
597
598 /* Unplug from the back */
599 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(ONETWENTYEIGHT_KILO), atop(ONETWENTYEIGHT_KILO)));
600 /* Shouldn't change the number of segments */
601 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
602
603 /* Unplug from the front */
604 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(0, atop(SIXTYFOUR_KILO)));
605 /* Shouldn't change the number of segments */
606 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
607
608 /* Unplugging the final fragment should fail */
609 atf_tc_expect_signal(SIGABRT, "Unplugging the last segment");
610 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(SIXTYFOUR_KILO), atop(SIXTYFOUR_KILO)));
611 }
612
613
614 /* <---- end Tests for Internal functions ----> */
615
616 /* Tests for functions exported via uvm_physseg.h */
617 ATF_TC(uvm_physseg_init);
618 ATF_TC_HEAD(uvm_physseg_init, tc)
619 {
620 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_physseg_init() call\
621 initializes the vm_physmem struct which holds the rb_tree.");
622 }
623 ATF_TC_BODY(uvm_physseg_init, tc)
624 {
625 uvm_physseg_init();
626
627 ATF_REQUIRE_EQ(0, uvm_physseg_get_entries());
628 }
629
630 ATF_TC(uvm_page_physload_preload);
631 ATF_TC_HEAD(uvm_page_physload_preload, tc)
632 {
633 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
634 call works without a panic() in a preload scenario.");
635 }
636 ATF_TC_BODY(uvm_page_physload_preload, tc)
637 {
638 uvm_physseg_t upm;
639
640 setup();
641
642 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
643 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
644
645 /* Should return a valid handle */
646 ATF_REQUIRE(uvm_physseg_valid(upm));
647
648 /* No pages should be allocated yet */
649 ATF_REQUIRE_EQ(0, uvmexp.npages);
650
651 /* After the first call one segment should exist */
652 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
653
654 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
655 #if VM_PHYSSEG_MAX > 1
656 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
657 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
658
659 /* Should return a valid handle */
660 ATF_REQUIRE(uvm_physseg_valid(upm));
661
662 ATF_REQUIRE_EQ(0, uvmexp.npages);
663
664 /* After the second call two segments should exist */
665 ATF_CHECK_EQ(2, uvm_physseg_get_entries());
666 #endif
667 }
668
669 ATF_TC(uvm_page_physload_postboot);
670 ATF_TC_HEAD(uvm_page_physload_postboot, tc)
671 {
672 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
673 panic()s in a post boot scenario.");
674 }
675 ATF_TC_BODY(uvm_page_physload_postboot, tc)
676 {
677 uvm_physseg_t upm;
678
679 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
680 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
681
682 struct vm_page *slab = malloc(sizeof(struct vm_page) * (npages1 + npages2));
683
684 setup();
685
686 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
687 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
688
689 /* Should return a valid handle */
690 ATF_REQUIRE(uvm_physseg_valid(upm));
691
692 /* No pages should be allocated yet */
693 ATF_REQUIRE_EQ(0, uvmexp.npages);
694
695 /* After the first call one segment should exist */
696 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
697
698 /* Post boot: Fake all segments and pages accounted for. */
699 uvm_page_init_fake(slab, npages1 + npages2);
700
701 atf_tc_expect_signal(SIGABRT,
702 "uvm_page_physload() called post boot");
703
704 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
705 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
706
707 /* Should return a valid handle */
708 ATF_REQUIRE(uvm_physseg_valid(upm));
709
710 ATF_REQUIRE_EQ(npages1 + npages2, uvmexp.npages);
711
712 /* After the second call two segments should exist */
713 ATF_CHECK_EQ(2, uvm_physseg_get_entries());
714 }
715
716 ATF_TC(uvm_physseg_handle_immutable);
717 ATF_TC_HEAD(uvm_physseg_handle_immutable, tc)
718 {
719 atf_tc_set_md_var(tc, "descr", "Tests if the uvm_physseg_t handle is \
720 immutable.");
721 }
722 ATF_TC_BODY(uvm_physseg_handle_immutable, tc)
723 {
724 uvm_physseg_t upm;
725
726 /* We insert the segments in out of order */
727
728 setup();
729
730 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
731 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
732
733 ATF_REQUIRE_EQ(0, uvmexp.npages);
734
735 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
736
737 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY, uvm_physseg_get_prev(upm));
738
739 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
740 #if VM_PHYSSEG_MAX > 1
741 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
742 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
743
744 ATF_REQUIRE_EQ(0, uvmexp.npages);
745
746 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
747
748 /* Fetch Previous, we inserted a lower value */
749 upm = uvm_physseg_get_prev(upm);
750
751 #if !defined(UVM_HOTPLUG)
752 /*
753 * This test is going to fail for the Array Implementation but is
754 * expected to pass in the RB Tree implementation.
755 */
756 /* Failure can be expected iff there is more than one handle */
757 atf_tc_expect_fail("Mutable handle in static array impl.");
758 #endif
759 ATF_CHECK(UVM_PHYSSEG_TYPE_INVALID_EMPTY != upm);
760 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
761 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
762 #endif
763 }
764
765 ATF_TC(uvm_physseg_seg_chomp_slab);
766 ATF_TC_HEAD(uvm_physseg_seg_chomp_slab, tc)
767 {
768 atf_tc_set_md_var(tc, "descr", "Tests the slab import code, uvm_physseg_seg_chomp_slab().");
769
770 }
771 ATF_TC_BODY(uvm_physseg_seg_chomp_slab, tc)
772 {
773 int err;
774 size_t i;
775 struct uvm_physseg *seg;
776 struct vm_page *slab, *pgs;
777 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
778
779 setup();
780
781 /* This is boot time */
782 slab = malloc(sizeof(struct vm_page) * npages * 2);
783
784 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
785
786 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
787
788 /* Should be able to allocate two 128 * sizeof(*slab) */
789 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
790 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
791
792 #if VM_PHYSSEG_MAX == 1
793 /*
794 * free() needs an extra region descriptor, but we only have
795 * one! The classic alloc() at free() problem
796 */
797
798 ATF_REQUIRE_EQ(ENOMEM, err);
799 #else
800 /* Try alloc/free at static time */
801 for (i = 0; i < npages; i++) {
802 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
803 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
804 ATF_REQUIRE_EQ(0, err);
805 }
806 #endif
807
808 /* Now setup post boot */
809 uvm.page_init_done = true;
810
811 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
812
813 /* Try alloc/free after uvm_page.c:uvm_page_init() as well */
814 for (i = 0; i < npages; i++) {
815 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
816 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
817 ATF_REQUIRE_EQ(0, err);
818 }
819
820 }
821
822 ATF_TC(uvm_physseg_alloc_from_slab);
823 ATF_TC_HEAD(uvm_physseg_alloc_from_slab, tc)
824 {
825 atf_tc_set_md_var(tc, "descr", "Tests the slab allocation code, uvm_physseg_seg_alloc_from_slab().");
826
827 }
828 ATF_TC_BODY(uvm_physseg_alloc_from_slab, tc)
829 {
830 struct uvm_physseg *seg;
831 struct vm_page *slab, *pgs;
832 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
833
834 setup();
835
836 /* This is boot time */
837 slab = malloc(sizeof(struct vm_page) * npages * 2);
838
839 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
840
841 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
842
843 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
844
845 ATF_REQUIRE(pgs != NULL);
846
847 /* Now setup post boot */
848 uvm.page_init_done = true;
849
850 #if VM_PHYSSEG_MAX > 1
851 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
852 ATF_REQUIRE(pgs != NULL);
853 #endif
854 atf_tc_expect_fail("alloc beyond extent");
855
856 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
857 ATF_REQUIRE(pgs != NULL);
858 }
859
860 ATF_TC(uvm_physseg_init_seg);
861 ATF_TC_HEAD(uvm_physseg_init_seg, tc)
862 {
863 atf_tc_set_md_var(tc, "descr", "Tests if uvm_physseg_init_seg adds pages to "
864 "uvmexp.npages");
865 }
866 ATF_TC_BODY(uvm_physseg_init_seg, tc)
867 {
868 struct uvm_physseg *seg;
869 struct vm_page *slab, *pgs;
870 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
871
872 setup();
873
874 /* This is boot time */
875 slab = malloc(sizeof(struct vm_page) * npages * 2);
876
877 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
878
879 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
880
881 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
882
883 ATF_REQUIRE_EQ(0, uvmexp.npages);
884
885 seg->start = 0;
886 seg->end = npages;
887
888 seg->avail_start = 0;
889 seg->avail_end = npages;
890
891 uvm_physseg_init_seg(PHYSSEG_NODE_TO_HANDLE(seg), pgs);
892
893 ATF_REQUIRE_EQ(npages, uvmexp.npages);
894 }
895
896 #if 0
897 ATF_TC(uvm_physseg_init_seg);
898 ATF_TC_HEAD(uvm_physseg_init_seg, tc)
899 {
900 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
901 call works without a panic() after Segment is inited.");
902 }
903 ATF_TC_BODY(uvm_physseg_init_seg, tc)
904 {
905 uvm_physseg_t upm;
906 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
907 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
908
909 setup();
910 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
911 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
912
913 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
914
915 ATF_CHECK_EQ(0, uvmexp.npages);
916
917 /*
918 * Boot time physplug needs explicit external init,
919 * Duplicate what uvm_page.c:uvm_page_init() does.
920 * Note: not everything uvm_page_init() does gets done here.
921 * Read the source.
922 */
923 /* suck in backing slab, initialise extent. */
924 uvm_physseg_seg_chomp_slab(upm, pgs, npages);
925
926 /*
927 * Actual pgs[] allocation, from extent.
928 */
929 uvm_physseg_alloc_from_slab(upm, npages);
930
931 /* Now we initialize the segment */
932 uvm_physseg_init_seg(upm, pgs);
933
934 /* Done with boot simulation */
935 extent_init();
936 uvm.page_init_done = true;
937
938 /* We have total memory of 1MB */
939 ATF_CHECK_EQ(PAGE_COUNT_1M, uvmexp.npages);
940
941 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
942 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
943 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
944
945 /* We added another 1MB so PAGE_COUNT_1M + PAGE_COUNT_1M */
946 ATF_CHECK_EQ(PAGE_COUNT_1M + PAGE_COUNT_1M, uvmexp.npages);
947
948 }
949 #endif
950
951 ATF_TC(uvm_physseg_get_start);
952 ATF_TC_HEAD(uvm_physseg_get_start, tc)
953 {
954 atf_tc_set_md_var(tc, "descr", "Tests if the start PFN is returned \
955 correctly from a segment created via uvm_page_physload().");
956 }
957 ATF_TC_BODY(uvm_physseg_get_start, tc)
958 {
959 uvm_physseg_t upm;
960
961 /* Fake early boot */
962 setup();
963
964 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
965 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
966
967 ATF_REQUIRE_EQ(0, uvmexp.npages);
968
969 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
970
971 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
972
973 /* This test will be triggered only if there are 2 or more segments. */
974 #if VM_PHYSSEG_MAX > 1
975 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
976 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
977
978 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
979
980 ATF_REQUIRE_EQ(0, uvmexp.npages);
981
982 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
983 #endif
984 }
985
986 ATF_TC(uvm_physseg_get_start_invalid);
987 ATF_TC_HEAD(uvm_physseg_get_start_invalid, tc)
988 {
989 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
990 correctly when uvm_physseg_get_start() is called with invalid \
991 parameter values.");
992 }
993 ATF_TC_BODY(uvm_physseg_get_start_invalid, tc)
994 {
995 /* Check for pgs == NULL */
996 setup();
997 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
998 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
999
1000 /* Force other check conditions */
1001 uvm.page_init_done = true;
1002
1003 ATF_REQUIRE_EQ(0, uvmexp.npages);
1004
1005 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1006
1007 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1008
1009 /* Invalid uvm_physseg_t */
1010 ATF_CHECK_EQ((paddr_t) -1,
1011 uvm_physseg_get_start(UVM_PHYSSEG_TYPE_INVALID));
1012 }
1013
1014 ATF_TC(uvm_physseg_get_end);
1015 ATF_TC_HEAD(uvm_physseg_get_end, tc)
1016 {
1017 atf_tc_set_md_var(tc, "descr", "Tests if the end PFN is returned \
1018 correctly from a segment created via uvm_page_physload().");
1019 }
1020 ATF_TC_BODY(uvm_physseg_get_end, tc)
1021 {
1022 uvm_physseg_t upm;
1023
1024 setup();
1025 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1026 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1027
1028 ATF_REQUIRE_EQ(0, uvmexp.npages);
1029
1030 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1031
1032 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1033
1034 /* This test will be triggered only if there are 2 or more segments. */
1035 #if VM_PHYSSEG_MAX > 1
1036 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1037 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1038
1039 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1040
1041 ATF_REQUIRE_EQ(0, uvmexp.npages);
1042
1043 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1044 #endif
1045 }
1046
1047 ATF_TC(uvm_physseg_get_end_invalid);
1048 ATF_TC_HEAD(uvm_physseg_get_end_invalid, tc)
1049 {
1050 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1051 correctly when uvm_physseg_get_end() is called with invalid \
1052 parameter values.");
1053 }
1054 ATF_TC_BODY(uvm_physseg_get_end_invalid, tc)
1055 {
1056 /* Check for pgs == NULL */
1057 setup();
1058 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1059 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1060
1061 /* Force other check conditions */
1062 uvm.page_init_done = true;
1063
1064 ATF_REQUIRE_EQ(0, uvmexp.npages);
1065
1066 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1067
1068 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1069
1070 /* Invalid uvm_physseg_t */
1071 ATF_CHECK_EQ((paddr_t) -1,
1072 uvm_physseg_get_end(UVM_PHYSSEG_TYPE_INVALID));
1073 }
1074
1075 ATF_TC(uvm_physseg_get_avail_start);
1076 ATF_TC_HEAD(uvm_physseg_get_avail_start, tc)
1077 {
1078 atf_tc_set_md_var(tc, "descr", "Tests if the avail_start PFN is \
1079 returned correctly from a segment created via uvm_page_physload().");
1080 }
1081 ATF_TC_BODY(uvm_physseg_get_avail_start, tc)
1082 {
1083 uvm_physseg_t upm;
1084
1085 setup();
1086 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1087 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1088
1089 ATF_REQUIRE_EQ(0, uvmexp.npages);
1090
1091 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1092
1093 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1094
1095 /* This test will be triggered only if there are 2 or more segments. */
1096 #if VM_PHYSSEG_MAX > 1
1097 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1098 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1099
1100 ATF_REQUIRE_EQ(0, uvmexp.npages);
1101
1102 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1103
1104 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1105 #endif
1106 }
1107
1108 ATF_TC(uvm_physseg_get_avail_start_invalid);
1109 ATF_TC_HEAD(uvm_physseg_get_avail_start_invalid, tc)
1110 {
1111 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1112 correctly when uvm_physseg_get_avail_start() is called with invalid\
1113 parameter values.");
1114 }
1115 ATF_TC_BODY(uvm_physseg_get_avail_start_invalid, tc)
1116 {
1117 /* Check for pgs == NULL */
1118 setup();
1119 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1120 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1121
1122 /* Force other check conditions */
1123 uvm.page_init_done = true;
1124
1125 ATF_REQUIRE_EQ(0, uvmexp.npages);
1126
1127 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1128
1129 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1130
1131 /* Invalid uvm_physseg_t */
1132 ATF_CHECK_EQ((paddr_t) -1,
1133 uvm_physseg_get_avail_start(UVM_PHYSSEG_TYPE_INVALID));
1134 }
1135
1136 ATF_TC(uvm_physseg_get_avail_end);
1137 ATF_TC_HEAD(uvm_physseg_get_avail_end, tc)
1138 {
1139 atf_tc_set_md_var(tc, "descr", "Tests if the avail_end PFN is \
1140 returned correctly from a segment created via uvm_page_physload().");
1141 }
1142 ATF_TC_BODY(uvm_physseg_get_avail_end, tc)
1143 {
1144 uvm_physseg_t upm;
1145
1146 setup();
1147 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1148 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1149
1150 ATF_REQUIRE_EQ(0, uvmexp.npages);
1151
1152 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1153
1154 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1155
1156 /* This test will be triggered only if there are 2 or more segments. */
1157 #if VM_PHYSSEG_MAX > 1
1158 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1159 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1160
1161 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1162
1163 ATF_REQUIRE_EQ(0, uvmexp.npages);
1164
1165 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1166 #endif
1167 }
1168
1169 ATF_TC(uvm_physseg_get_avail_end_invalid);
1170 ATF_TC_HEAD(uvm_physseg_get_avail_end_invalid, tc)
1171 {
1172 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1173 correctly when uvm_physseg_get_avail_end() is called with invalid\
1174 parameter values.");
1175 }
1176 ATF_TC_BODY(uvm_physseg_get_avail_end_invalid, tc)
1177 {
1178 /* Check for pgs == NULL */
1179 setup();
1180 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1181 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1182
1183 /* Force other check conditions */
1184 uvm.page_init_done = true;
1185
1186 ATF_REQUIRE_EQ(0, uvmexp.npages);
1187
1188 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1189
1190 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1191
1192 /* Invalid uvm_physseg_t */
1193 ATF_CHECK_EQ((paddr_t) -1,
1194 uvm_physseg_get_avail_end(UVM_PHYSSEG_TYPE_INVALID));
1195 }
1196
1197 ATF_TC(uvm_physseg_get_next);
1198 ATF_TC_HEAD(uvm_physseg_get_next, tc)
1199 {
1200 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for next \
1201 segment using the uvm_physseg_get_next() call.");
1202 }
1203 ATF_TC_BODY(uvm_physseg_get_next, tc)
1204 {
1205 uvm_physseg_t upm;
1206 #if VM_PHYSSEG_MAX > 1
1207 uvm_physseg_t upm_next;
1208 #endif
1209
1210 /* We insert the segments in ascending order */
1211
1212 setup();
1213 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1214 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1215
1216 ATF_REQUIRE_EQ(0, uvmexp.npages);
1217
1218 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1219
1220 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_OVERFLOW,
1221 uvm_physseg_get_next(upm));
1222
1223 /* This test will be triggered only if there are 2 or more segments. */
1224 #if VM_PHYSSEG_MAX > 1
1225 upm_next = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1226 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1227
1228 ATF_REQUIRE_EQ(0, uvmexp.npages);
1229
1230 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1231
1232 upm = uvm_physseg_get_next(upm); /* Fetch Next */
1233
1234 ATF_CHECK_EQ(upm_next, upm);
1235 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1236 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1237 #endif
1238
1239 /* This test will be triggered only if there are 3 or more segments. */
1240 #if VM_PHYSSEG_MAX > 2
1241 upm_next = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1242 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1243
1244 ATF_REQUIRE_EQ(0, uvmexp.npages);
1245
1246 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1247
1248 upm = uvm_physseg_get_next(upm); /* Fetch Next */
1249
1250 ATF_CHECK_EQ(upm_next, upm);
1251 ATF_CHECK_EQ(VALID_START_PFN_3, uvm_physseg_get_start(upm));
1252 ATF_CHECK_EQ(VALID_END_PFN_3, uvm_physseg_get_end(upm));
1253 #endif
1254 }
1255
1256 ATF_TC(uvm_physseg_get_next_invalid);
1257 ATF_TC_HEAD(uvm_physseg_get_next_invalid, tc)
1258 {
1259 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1260 correctly when uvm_physseg_get_next() is called with invalid \
1261 parameter values.");
1262 }
1263 ATF_TC_BODY(uvm_physseg_get_next_invalid, tc)
1264 {
1265 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID;
1266
1267 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID, uvm_physseg_get_next(upm));
1268 }
1269
1270 ATF_TC(uvm_physseg_get_prev);
1271 ATF_TC_HEAD(uvm_physseg_get_prev, tc)
1272 {
1273 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for previous \
1274 segment using the uvm_physseg_get_prev() call.");
1275 }
1276 ATF_TC_BODY(uvm_physseg_get_prev, tc)
1277 {
1278 #if VM_PHYSSEG_MAX > 1
1279 uvm_physseg_t upm;
1280 #endif
1281 uvm_physseg_t upm_prev;
1282
1283
1284 setup();
1285 upm_prev = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1286 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1287
1288 ATF_REQUIRE_EQ(0, uvmexp.npages);
1289
1290 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1291
1292 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY,
1293 uvm_physseg_get_prev(upm_prev));
1294
1295 /* This test will be triggered only if there are 2 or more segments. */
1296 #if VM_PHYSSEG_MAX > 1
1297 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1298 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1299
1300 ATF_REQUIRE_EQ(0, uvmexp.npages);
1301
1302 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1303
1304 /* Fetch Previous, we inserted a lower value */
1305 upm = uvm_physseg_get_prev(upm);
1306
1307 ATF_CHECK_EQ(upm_prev, upm);
1308 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1309 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1310 #endif
1311
1312 /* This test will be triggered only if there are 3 or more segments. */
1313 #if VM_PHYSSEG_MAX > 2
1314 uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1315 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1316
1317 ATF_REQUIRE_EQ(0, uvmexp.npages);
1318
1319 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1320
1321 /*
1322 * This will return UVM_PHYSSEG_TYPE_INVALID_EMPTY since we are at
1323 * the lowest segment
1324 */
1325 upm = uvm_physseg_get_prev(upm);
1326
1327 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY, upm);
1328 #endif
1329 }
1330
1331 ATF_TC(uvm_physseg_get_prev_invalid);
1332 ATF_TC_HEAD(uvm_physseg_get_prev_invalid, tc)
1333 {
1334 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1335 correctly when uvm_physseg_get_prev() is called with invalid \
1336 parameter values.");
1337 }
1338 ATF_TC_BODY(uvm_physseg_get_prev_invalid, tc)
1339 {
1340 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID;
1341
1342 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID, uvm_physseg_get_prev(upm));
1343 }
1344
1345 ATF_TC(uvm_physseg_get_first);
1346 ATF_TC_HEAD(uvm_physseg_get_first, tc)
1347 {
1348 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for first \
1349 segment (lowest node) using the uvm_physseg_get_first() call.");
1350 }
1351 ATF_TC_BODY(uvm_physseg_get_first, tc)
1352 {
1353 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID_EMPTY;
1354 uvm_physseg_t upm_first;
1355
1356 /* Fake early boot */
1357 setup();
1358
1359 /* No nodes exist */
1360 ATF_CHECK_EQ(upm, uvm_physseg_get_first());
1361
1362 upm_first = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1363 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1364
1365 ATF_REQUIRE_EQ(0, uvmexp.npages);
1366
1367 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1368
1369 /* Pointer to first should be the least valued node */
1370 upm = uvm_physseg_get_first();
1371 ATF_CHECK_EQ(upm_first, upm);
1372 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1373 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1374 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1375 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1376
1377 /* This test will be triggered only if there are 2 or more segments. */
1378 #if VM_PHYSSEG_MAX > 1
1379 /* Insert a node of lesser value */
1380 upm_first = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1381 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1382
1383 ATF_CHECK_EQ(0, uvmexp.npages);
1384
1385 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1386
1387 /* Pointer to first should be the least valued node */
1388 upm = uvm_physseg_get_first();
1389 ATF_CHECK_EQ(upm_first, upm);
1390 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1391 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1392 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1393 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1394 #endif
1395
1396 /* This test will be triggered only if there are 3 or more segments. */
1397 #if VM_PHYSSEG_MAX > 2
1398 /* Insert a node of higher value */
1399 upm_first = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1400 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1401
1402 ATF_CHECK_EQ(0, uvmexp.npages);
1403
1404 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1405
1406 /* Pointer to first should be the least valued node */
1407 upm = uvm_physseg_get_first();
1408 ATF_CHECK(upm_first != upm);
1409 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1410 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1411 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1412 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1413 #endif
1414 }
1415
1416 ATF_TC(uvm_physseg_get_last);
1417 ATF_TC_HEAD(uvm_physseg_get_last, tc)
1418 {
1419 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for last \
1420 segment using the uvm_physseg_get_last() call.");
1421 }
1422 ATF_TC_BODY(uvm_physseg_get_last, tc)
1423 {
1424 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID_EMPTY;
1425 uvm_physseg_t upm_last;
1426
1427 setup();
1428
1429 /* No nodes exist */
1430 ATF_CHECK_EQ(upm, uvm_physseg_get_last());
1431
1432 upm_last = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1433 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1434
1435 ATF_REQUIRE_EQ(0, uvmexp.npages);
1436
1437 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1438
1439 /* Pointer to last should be the most valued node */
1440 upm = uvm_physseg_get_last();
1441 ATF_CHECK_EQ(upm_last, upm);
1442 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1443 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1444 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1445 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1446
1447 /* This test will be triggered only if there are 2 or more segments. */
1448 #if VM_PHYSSEG_MAX > 1
1449 /* Insert node of greater value */
1450 upm_last = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1451 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1452
1453 ATF_REQUIRE_EQ(0, uvmexp.npages);
1454
1455 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1456
1457 /* Pointer to last should be the most valued node */
1458 upm = uvm_physseg_get_last();
1459 ATF_CHECK_EQ(upm_last, upm);
1460 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1461 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1462 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1463 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1464 #endif
1465
1466 /* This test will be triggered only if there are 3 or more segments. */
1467 #if VM_PHYSSEG_MAX > 2
1468 /* Insert node of greater value */
1469 upm_last = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1470 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1471
1472 ATF_REQUIRE_EQ(0, uvmexp.npages);
1473
1474 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1475
1476 /* Pointer to last should be the most valued node */
1477 upm = uvm_physseg_get_last();
1478 ATF_CHECK_EQ(upm_last, upm);
1479 ATF_CHECK_EQ(VALID_START_PFN_3, uvm_physseg_get_start(upm));
1480 ATF_CHECK_EQ(VALID_END_PFN_3, uvm_physseg_get_end(upm));
1481 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_3, uvm_physseg_get_avail_start(upm));
1482 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3, uvm_physseg_get_avail_end(upm));
1483 #endif
1484 }
1485
1486 ATF_TC(uvm_physseg_valid);
1487 ATF_TC_HEAD(uvm_physseg_valid, tc)
1488 {
1489 atf_tc_set_md_var(tc, "descr", "Tests the pointer value for current \
1490 segment is valid using the uvm_physseg_valid() call.");
1491 }
1492 ATF_TC_BODY(uvm_physseg_valid, tc)
1493 {
1494 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1495
1496 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1497
1498 uvm_physseg_t upm;
1499
1500 setup();
1501 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1502 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1503
1504 ATF_REQUIRE_EQ(0, uvmexp.npages);
1505
1506 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1507
1508 uvm_physseg_init_seg(upm, pgs);
1509
1510 ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);
1511
1512 ATF_CHECK_EQ(true, uvm_physseg_valid(upm));
1513 }
1514
1515 ATF_TC(uvm_physseg_valid_invalid);
1516 ATF_TC_HEAD(uvm_physseg_valid_invalid, tc)
1517 {
1518 atf_tc_set_md_var(tc, "descr", "Tests the pointer value for current \
1519 segment is invalid using the uvm_physseg_valid() call.");
1520 }
1521 ATF_TC_BODY(uvm_physseg_valid_invalid, tc)
1522 {
1523 uvm_physseg_t upm;
1524
1525 setup();
1526 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1527 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1528
1529 /* Force other check conditions */
1530 uvm.page_init_done = true;
1531
1532 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1533
1534 /* Invalid uvm_physseg_t */
1535 ATF_CHECK_EQ(false, uvm_physseg_valid(UVM_PHYSSEG_TYPE_INVALID));
1536
1537 /*
1538 * Without any pages initialized for the segment, it is considered
1539 * invalid
1540 */
1541 ATF_CHECK_EQ(false, uvm_physseg_valid(upm));
1542 }
1543
1544 ATF_TC(uvm_physseg_get_highest);
1545 ATF_TC_HEAD(uvm_physseg_get_highest, tc)
1546 {
1547 atf_tc_set_md_var(tc, "descr", "Tests if the returned PFN matches \
1548 the highest PFN in use by the system.");
1549 }
1550 ATF_TC_BODY(uvm_physseg_get_highest, tc)
1551 {
1552 setup();
1553 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1554 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1555
1556 /* Only one segment so highest is the current */
1557 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1 - 1, uvm_physseg_get_highest_frame());
1558
1559 /* This test will be triggered only if there are 2 or more segments. */
1560 #if VM_PHYSSEG_MAX > 1
1561 uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1562 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1563
1564 /* PFN_3 > PFN_1 */
1565 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3 - 1, uvm_physseg_get_highest_frame());
1566 #endif
1567
1568 /* This test will be triggered only if there are 3 or more segments. */
1569 #if VM_PHYSSEG_MAX > 2
1570 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1571 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1572
1573 /* PFN_3 > PFN_2 */
1574 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3 - 1, uvm_physseg_get_highest_frame());
1575 #endif
1576 }
1577
1578 ATF_TC(uvm_physseg_get_free_list);
1579 ATF_TC_HEAD(uvm_physseg_get_free_list, tc)
1580 {
1581 atf_tc_set_md_var(tc, "descr", "Tests if the Free List type of a \
1582 segment, as returned by the uvm_physseg_get_free_list() call, \
1583 matches the one the segment was loaded with.");
1584 }
1585 ATF_TC_BODY(uvm_physseg_get_free_list, tc)
1586 {
1587 uvm_physseg_t upm;
1588
1589 /* Fake early boot */
1590 setup();
1591
1592 /* Insertions are made in ascending order */
1593 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1594 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1595
1596 ATF_CHECK_EQ(VM_FREELIST_DEFAULT, uvm_physseg_get_free_list(upm));
1597
1598 /* This test will be triggered only if there are 2 or more segments. */
1599 #if VM_PHYSSEG_MAX > 1
1600 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1601 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_FIRST16);
1602
1603 ATF_CHECK_EQ(VM_FREELIST_FIRST16, uvm_physseg_get_free_list(upm));
1604 #endif
1605
1606 /* This test will be triggered only if there are 3 or more segments. */
1607 #if VM_PHYSSEG_MAX > 2
1608 upm = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1609 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_FIRST1G);
1610
1611 ATF_CHECK_EQ(VM_FREELIST_FIRST1G, uvm_physseg_get_free_list(upm));
1612 #endif
1613 }
1614
1615 ATF_TC(uvm_physseg_get_start_hint);
1616 ATF_TC_HEAD(uvm_physseg_get_start_hint, tc)
1617 {
1618 atf_tc_set_md_var(tc, "descr", "Tests if the start_hint value \
1619 returned by the uvm_physseg_get_start_hint() call is zero for a \
1620 freshly loaded segment.");
1621 }
1622 ATF_TC_BODY(uvm_physseg_get_start_hint, tc)
1623 {
1624 uvm_physseg_t upm;
1625
1626 setup();
1627 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1628 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1629
1630 /* Will be Zero since no specific value is set during init */
1631 ATF_CHECK_EQ(0, uvm_physseg_get_start_hint(upm));
1632 }
1633
1634 ATF_TC(uvm_physseg_set_start_hint);
1635 ATF_TC_HEAD(uvm_physseg_set_start_hint, tc)
1636 {
1637 atf_tc_set_md_var(tc, "descr", "Tests if the returned start_hint value \
1638 of a segment matches the one set by the \
1639 uvm_physseg_set_start_hint() call.");
1640 }
1641 ATF_TC_BODY(uvm_physseg_set_start_hint, tc)
1642 {
1643 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1644
1645 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1646
1647 uvm_physseg_t upm;
1648
1649 setup();
1650 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1651 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1652
1653 uvm_physseg_init_seg(upm, pgs);
1654
1655 ATF_CHECK_EQ(true, uvm_physseg_set_start_hint(upm, atop(128)));
1656
1657 /* Will be atop(128), the value set by the call above */
1658 ATF_CHECK_EQ(atop(128), uvm_physseg_get_start_hint(upm));
1659 }
1660
1661 ATF_TC(uvm_physseg_set_start_hint_invalid);
1662 ATF_TC_HEAD(uvm_physseg_set_start_hint_invalid, tc)
1663 {
1664 atf_tc_set_md_var(tc, "descr", "Tests if the returned value is false \
1665 when uvm_physseg_set_start_hint() is called on an invalid \
1666 segment.");
1667 }
1668 ATF_TC_BODY(uvm_physseg_set_start_hint_invalid, tc)
1669 {
1670 uvm_physseg_t upm;
1671
1672 setup();
1673 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1674 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1675
1676 /* Force other check conditions */
1677 uvm.page_init_done = true;
1678
1679 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1680
1681 ATF_CHECK_EQ(false, uvm_physseg_set_start_hint(upm, atop(128)));
1682
1683 /*
1684 * Will still be zero, since the uvm_physseg_set_start_hint() call
1685 * above failed
1686 */
1687 atf_tc_expect_signal(SIGABRT, "invalid uvm_physseg_t handle");
1688
1689 ATF_CHECK_EQ(0, uvm_physseg_get_start_hint(upm));
1690 }
1691
1692 ATF_TC(uvm_physseg_get_pg);
1693 ATF_TC_HEAD(uvm_physseg_get_pg, tc)
1694 {
1695 atf_tc_set_md_var(tc, "descr", "Tests if the returned vm_page struct \
1696 is correct when fetched by uvm_physseg_get_pg() call.");
1697 }
1698 ATF_TC_BODY(uvm_physseg_get_pg, tc)
1699 {
1700 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1701
1702 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1703
1704 struct vm_page *extracted_pg = NULL;
1705
1706 uvm_physseg_t upm;
1707
1708 setup();
1709 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1710 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1711
1712 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1713
1714 ATF_REQUIRE_EQ(0, uvmexp.npages);
1715
1716 /* Now we initialize the segment */
1717 uvm_physseg_init_seg(upm, pgs);
1718
1719 ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);
1720
1721 ATF_REQUIRE_EQ(NULL, extracted_pg);
1722
1723 /* Try fetching the 5th Page in the Segment */
1724 extracted_pg = uvm_physseg_get_pg(upm, 5);
1725
1726 /* The value of phys_addr is n * PAGE_SIZE, where n is the page number */
1727 ATF_CHECK_EQ(5 * PAGE_SIZE, extracted_pg->phys_addr);
1728
1729 /* Try fetching the 113th Page in the Segment */
1730 extracted_pg = uvm_physseg_get_pg(upm, 113);
1731
1732 ATF_CHECK_EQ(113 * PAGE_SIZE, extracted_pg->phys_addr);
1733 }
1734
1735 #ifdef __HAVE_PMAP_PHYSSEG
1736 ATF_TC(uvm_physseg_get_pmseg);
1737 ATF_TC_HEAD(uvm_physseg_get_pmseg, tc)
1738 {
1739 atf_tc_set_md_var(tc, "descr", "Tests if the returned pmap_physseg \
1740 struct is correct when fetched by uvm_physseg_get_pmseg() call.");
1741 }
1742 ATF_TC_BODY(uvm_physseg_get_pmseg, tc)
1743 {
1744 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1745
1746 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1747
1748 struct pmap_physseg pmseg = { true };
1749
1750 struct pmap_physseg *extracted_pmseg = NULL;
1751
1752 uvm_physseg_t upm;
1753
1754 setup();
1755 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1756 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1757
1758 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1759
1760 ATF_REQUIRE_EQ(0, uvmexp.npages);
1761
1762 /* Now we initialize the segment */
1763 uvm_physseg_init_seg(upm, pgs);
1764
1765 ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);
1766
1767 ATF_REQUIRE_EQ(NULL, extracted_pmseg);
1768
1769 ATF_REQUIRE_EQ(true, pmseg.dummy_variable);
1770
1771 /* Extract the current pmseg */
1772 extracted_pmseg = uvm_physseg_get_pmseg(upm);
1773
1774 /*
1775 * We can only check if it is not NULL
1776 * We do not know the value it contains
1777 */
1778 ATF_CHECK(NULL != extracted_pmseg);
1779
1780 extracted_pmseg->dummy_variable = pmseg.dummy_variable;
1781
1782 /* Invert value to ensure test integrity */
1783 pmseg.dummy_variable = false;
1784
1785 ATF_REQUIRE_EQ(false, pmseg.dummy_variable);
1786
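	/* Re-fetch: the segment's own copy should be unaffected by changes to the local struct. */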
1787 extracted_pmseg = uvm_physseg_get_pmseg(upm);
1788
1789 ATF_CHECK(NULL != extracted_pmseg);
1790
1791 ATF_CHECK_EQ(true, extracted_pmseg->dummy_variable);
1792 }
1793 #endif
1794
1795 ATF_TC(vm_physseg_find);
1796 ATF_TC_HEAD(vm_physseg_find, tc)
1797 {
1798 	atf_tc_set_md_var(tc, "descr", "Tests if the returned segment number \
1799 	    is correct when a PFN is passed into the uvm_physseg_find() call. \
1800 	    In addition, the offset of the PFN from the start of the segment \
1801 	    is also set if the offset parameter is passed in as non-NULL.");
1802 }
1803 ATF_TC_BODY(vm_physseg_find, tc)
1804 {
1805 psize_t offset = (psize_t) -1;
1806
1807 uvm_physseg_t upm_first, result;
1808 #if VM_PHYSSEG_MAX > 1
1809 uvm_physseg_t upm_second;
1810 #endif
1811
1812 setup();
1813
1814 upm_first = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1815 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1816
1817 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1818
1819 ATF_REQUIRE_EQ(0, uvmexp.npages);
1820
1821 /* This test will be triggered only if there are 2 or more segments. */
1822 #if VM_PHYSSEG_MAX > 1
1823 upm_second = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1824 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1825
1826 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1827
1828 ATF_REQUIRE_EQ(0, uvmexp.npages);
1829 #endif
1830
1831 /* Under ONE_MEGABYTE is segment upm_first */
1832 result = uvm_physseg_find(atop(ONE_MEGABYTE - 1024), NULL);
1833 ATF_CHECK_EQ(upm_first, result);
1834 ATF_CHECK_EQ(uvm_physseg_get_start(upm_first),
1835 uvm_physseg_get_start(result));
1836 ATF_CHECK_EQ(uvm_physseg_get_end(upm_first),
1837 uvm_physseg_get_end(result));
1838 ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_first),
1839 uvm_physseg_get_avail_start(result));
1840 ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_first),
1841 uvm_physseg_get_avail_end(result));
1842
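	/* offset is expected to be untouched, since NULL was passed for the offset argument above. */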
1843 ATF_REQUIRE_EQ((psize_t) -1, offset);
1844
1845 /* This test will be triggered only if there are 2 or more segments. */
1846 #if VM_PHYSSEG_MAX > 1
1847 /* Over ONE_MEGABYTE is segment upm_second */
1848 result = uvm_physseg_find(atop(ONE_MEGABYTE + 8192), &offset);
1849 ATF_CHECK_EQ(upm_second, result);
1850 ATF_CHECK_EQ(uvm_physseg_get_start(upm_second),
1851 uvm_physseg_get_start(result));
1852 ATF_CHECK_EQ(uvm_physseg_get_end(upm_second),
1853 uvm_physseg_get_end(result));
1854 ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_second),
1855 uvm_physseg_get_avail_start(result));
1856 ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_second),
1857 uvm_physseg_get_avail_end(result));
1858
1859 /* Offset is calculated based on PAGE_SIZE */
1860 	/* atop(ONE_MEGABYTE + (2 * PAGE_SIZE)) - VALID_START_PFN_2 = 2 */
1861 ATF_CHECK_EQ(2, offset);
1862 #else
1863 /* Under ONE_MEGABYTE is segment upm_first */
1864 result = uvm_physseg_find(atop(ONE_MEGABYTE - 12288), &offset);
1865 ATF_CHECK_EQ(upm_first, result);
1866 ATF_CHECK_EQ(uvm_physseg_get_start(upm_first),
1867 uvm_physseg_get_start(result));
1868 ATF_CHECK_EQ(uvm_physseg_get_end(upm_first),
1869 uvm_physseg_get_end(result));
1870 ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_first),
1871 uvm_physseg_get_avail_start(result));
1872 ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_first),
1873 uvm_physseg_get_avail_end(result));
1874
1875 /* Offset is calculated based on PAGE_SIZE */
1876 	/* atop(ONE_MEGABYTE - (3 * PAGE_SIZE)) - VALID_START_PFN_1 = 253 */
1877 ATF_CHECK_EQ(253, offset);
1878 #endif
1879 }
1880
1881 ATF_TC(vm_physseg_find_invalid);
1882 ATF_TC_HEAD(vm_physseg_find_invalid, tc)
1883 {
1884 	atf_tc_set_md_var(tc, "descr", "Tests if the returned segment is \
1885 	    UVM_PHYSSEG_TYPE_INVALID when a non-existent PFN is passed into \
1886 	    the uvm_physseg_find() call.");
1887 }
1888 ATF_TC_BODY(vm_physseg_find_invalid, tc)
1889 {
1890 psize_t offset = (psize_t) -1;
1891
1892 setup();
1893 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1894 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1895
1896 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1897
1898 ATF_REQUIRE_EQ(0, uvmexp.npages);
1899
1900 	/* No segment over 3 MB exists at the moment */
1901 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID,
1902 uvm_physseg_find(atop(ONE_MEGABYTE * 3), NULL));
1903
1904 ATF_REQUIRE_EQ((psize_t) -1, offset);
1905
1906 	/* No segment over 3 MB exists at the moment */
1907 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID,
1908 uvm_physseg_find(atop(ONE_MEGABYTE * 3), &offset));
1909
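	/* A failed lookup is expected to leave the passed-in offset untouched. */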
1910 ATF_CHECK_EQ((psize_t) -1, offset);
1911 }
1912
1913 ATF_TC(uvm_page_physunload_start);
1914 ATF_TC_HEAD(uvm_page_physunload_start, tc)
1915 {
1916 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
1917 	    call works without a panic(). Unloads from the start of the segment.");
1918 }
1919 ATF_TC_BODY(uvm_page_physunload_start, tc)
1920 {
1921 /*
1922 	 * Would uvmexp.npages decrease every time uvm_page_physunload() is called?
1923 */
1924 psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);
1925
1926 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1927
1928 paddr_t p = 0;
1929
1930 uvm_physseg_t upm;
1931
1932 setup();
1933 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1934 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1935
1936 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1937
1938 ATF_REQUIRE_EQ(0, uvmexp.npages);
1939
1940 uvm_physseg_init_seg(upm, pgs);
1941
1942 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
1943
1944 /*
1945 	 * When called for the first time, uvm_page_physunload() removes the first PFN
1946 *
1947 * New avail start will be VALID_AVAIL_START_PFN_2 + 1
1948 */
1949 ATF_CHECK_EQ(VALID_START_PFN_2, atop(p));
1950
1951 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
1952 uvm_physseg_get_avail_start(upm));
1953
1954 ATF_CHECK_EQ(VALID_START_PFN_2 + 1, uvm_physseg_get_start(upm));
1955
1956 /* Rest of the stuff should remain the same */
1957 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1958 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1959 }
1960
1961 ATF_TC(uvm_page_physunload_end);
1962 ATF_TC_HEAD(uvm_page_physunload_end, tc)
1963 {
1964 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
1965 	    call works without a panic(). Unloads from the end of the segment.");
1966 }
1967 ATF_TC_BODY(uvm_page_physunload_end, tc)
1968 {
1969 /*
1970 	 * Would uvmexp.npages decrease every time uvm_page_physunload() is called?
1971 */
1972 paddr_t p = 0;
1973
1974 uvm_physseg_t upm;
1975
1976 setup();
1977 /* Note: start != avail_start to remove from end. */
1978 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1979 VALID_AVAIL_START_PFN_2 + 1, VALID_AVAIL_END_PFN_2,
1980 VM_FREELIST_DEFAULT);
1981
1982 p = 0;
1983
1984 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1985
1986 ATF_REQUIRE_EQ(0, uvmexp.npages);
1987
1988 ATF_REQUIRE(
1989 uvm_physseg_get_avail_start(upm) != uvm_physseg_get_start(upm));
1990
1991 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
1992
1993 /*
1994 	 * Remember: if X is the (exclusive) upper limit, the last valid PFN is X - 1.
1995 	 *
1996 	 * For example, if 256 is the upper limit for 1MB of memory,
1997 	 * the last valid PFN is 256 - 1 = 255.
1998 */
1999
2000 ATF_CHECK_EQ(VALID_END_PFN_2 - 1, atop(p));
2001
2002 /*
2003 	 * Since start != avail_start, uvm_page_physunload() removes the last PFN
2004 	 *
2005 	 * New avail end will be VALID_AVAIL_END_PFN_2 - 1
2006 	 * New end will be VALID_END_PFN_2 - 1
2007 */
2008
2009 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2 - 1, uvm_physseg_get_avail_end(upm));
2010
2011 ATF_CHECK_EQ(VALID_END_PFN_2 - 1, uvm_physseg_get_end(upm));
2012
2013 /* Rest of the stuff should remain the same */
2014 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
2015 uvm_physseg_get_avail_start(upm));
2016 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2017 }
2018
2019 ATF_TC(uvm_page_physunload_none);
2020 ATF_TC_HEAD(uvm_page_physunload_none, tc)
2021 {
2022 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
2023 	    call works without a panic(). Does not unload from the start or \
2024 	    the end because start != avail_start and end != avail_end, \
2025 	    respectively.");
2026 }
2027 ATF_TC_BODY(uvm_page_physunload_none, tc)
2028 {
2029 psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);
2030
2031 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
2032
2033 paddr_t p = 0;
2034
2035 uvm_physseg_t upm;
2036
2037 setup();
2038 /*
2039 * Note: start != avail_start and end != avail_end.
2040 *
2041 	 * This prevents any unload from occurring.
2042 */
2043 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2044 VALID_AVAIL_START_PFN_2 + 1, VALID_AVAIL_END_PFN_2 - 1,
2045 VM_FREELIST_DEFAULT);
2046
2047 p = 0;
2048
2049 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2050
2051 ATF_REQUIRE_EQ(0, uvmexp.npages);
2052
2053 ATF_REQUIRE(
2054 uvm_physseg_get_avail_start(upm) != uvm_physseg_get_start(upm));
2055
2056 uvm_physseg_init_seg(upm, pgs);
2057
2058 ATF_CHECK_EQ(false, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2059
2060 	/* uvm_page_physunload() did not unload any memory, so p stays 0 */
2061 ATF_CHECK_EQ(0, p);
2062
2063 /* Rest of the stuff should remain the same */
2064 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
2065 uvm_physseg_get_avail_start(upm));
2066 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2 - 1,
2067 uvm_physseg_get_avail_end(upm));
2068 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2069 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
2070 }
2071
2072 ATF_TC(uvm_page_physunload_delete_start);
2073 ATF_TC_HEAD(uvm_page_physunload_delete_start, tc)
2074 {
2075 	atf_tc_set_md_var(tc, "descr", "Tests if uvm_page_physunload() \
2076 	    works when the segment becomes small enough to be deleted. \
2077 	    NOTE: This one deletes from the start.");
2078 }
2079 ATF_TC_BODY(uvm_page_physunload_delete_start, tc)
2080 {
2081 /*
2082 	 * Would uvmexp.npages decrease every time uvm_page_physunload() is called?
2083 */
2084 paddr_t p = 0;
2085
2086 uvm_physseg_t upm;
2087
2088 setup();
2089
2090 /*
2091 	 * Set up the nuke from the starting point
2092 */
2093
2094 upm = uvm_page_physload(VALID_END_PFN_1 - 1, VALID_END_PFN_1,
2095 VALID_AVAIL_END_PFN_1 - 1, VALID_AVAIL_END_PFN_1,
2096 VM_FREELIST_DEFAULT);
2097
2098 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2099
2100 ATF_REQUIRE_EQ(0, uvmexp.npages);
2101
2102 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
2103 #if VM_PHYSSEG_MAX > 1
2104 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2105 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2106
2107 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
2108 #endif
2109
2110 #if VM_PHYSSEG_MAX == 1
2111 atf_tc_expect_signal(SIGABRT,
2112 "cannot uvm_page_physunload() the last segment");
2113 #endif
2114
2115 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2116
2117 ATF_CHECK_EQ(VALID_END_PFN_1 - 1, atop(p));
2118
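	/* The one-page segment is now empty, so it should have been deleted outright. */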
2119 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
2120
2121 /* The only node now is the one we inserted second. */
2122 upm = uvm_physseg_get_first();
2123
2124 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2125 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
2126 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
2127 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
2128 }
2129
2130 ATF_TC(uvm_page_physunload_delete_end);
2131 ATF_TC_HEAD(uvm_page_physunload_delete_end, tc)
2132 {
2133 	atf_tc_set_md_var(tc, "descr", "Tests if uvm_page_physunload() \
2134 	    works when the segment becomes small enough to be deleted. \
2135 	    NOTE: This one deletes from the end.");
2136 }
2137 ATF_TC_BODY(uvm_page_physunload_delete_end, tc)
2138 {
2139 /*
2140 	 * Would uvmexp.npages decrease every time uvm_page_physunload() is called?
2141 */
2142
2143 paddr_t p = 0;
2144
2145 uvm_physseg_t upm;
2146
2147 setup();
2148
2149 /*
2150 	 * Set up the nuke from the ending point
2151 */
2152
2153 upm = uvm_page_physload(VALID_START_PFN_1, VALID_START_PFN_1 + 2,
2154 VALID_AVAIL_START_PFN_1 + 1, VALID_AVAIL_START_PFN_1 + 2,
2155 VM_FREELIST_DEFAULT);
2156
2157 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2158
2159 ATF_REQUIRE_EQ(0, uvmexp.npages);
2160
2161 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
2162 #if VM_PHYSSEG_MAX > 1
2163 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2164 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2165
2166 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
2167 #endif
2168
2169 #if VM_PHYSSEG_MAX == 1
2170 atf_tc_expect_signal(SIGABRT,
2171 "cannot uvm_page_physunload() the last segment");
2172 #endif
2173
2174 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2175
2176 p = 0;
2177
2178 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2179
2180 ATF_CHECK_EQ(VALID_START_PFN_1 + 2, atop(p));
2181
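	/* The first segment has now been whittled down to nothing, so it should have been deleted. */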
2182 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
2183
2184 /* The only node now is the one we inserted second. */
2185 upm = uvm_physseg_get_first();
2186
2187 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2188 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
2189 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
2190 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
2191 }
2192
2193 ATF_TC(uvm_page_physunload_invalid);
2194 ATF_TC_HEAD(uvm_page_physunload_invalid, tc)
2195 {
2196 	atf_tc_set_md_var(tc, "descr", "Tests if uvm_page_physunload() \
2197 	    fails when the requested free list does not match.");
2198 }
2199 ATF_TC_BODY(uvm_page_physunload_invalid, tc)
2200 {
2201 psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);
2202
2203 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
2204
2205 paddr_t p = 0;
2206
2207 uvm_physseg_t upm;
2208
2209 setup();
2210 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2211 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2212
2213 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2214
2215 ATF_REQUIRE_EQ(0, uvmexp.npages);
2216
2217 uvm_physseg_init_seg(upm, pgs);
2218
2219 ATF_CHECK_EQ(false, uvm_page_physunload(upm, VM_FREELIST_FIRST4G, &p));
2220 }
2221
2222 ATF_TC(uvm_page_physunload_force);
2223 ATF_TC_HEAD(uvm_page_physunload_force, tc)
2224 {
2225 	atf_tc_set_md_var(tc, "descr", "Tests if the basic \
2226 	    uvm_page_physunload_force() call, including delete, works without a panic().");
2227 }
2228 ATF_TC_BODY(uvm_page_physunload_force, tc)
2229 {
2230 /*
2231 	 * Would uvmexp.npages decrease every time uvm_page_physunload() is called?
2232 */
2233 paddr_t p = 0;
2234
2235 uvm_physseg_t upm;
2236
2237 setup();
2238 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
2239 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
2240
2241 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2242
2243 ATF_REQUIRE_EQ(0, uvmexp.npages);
2244
2245 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
2246 #if VM_PHYSSEG_MAX > 1
2247 /*
2248 	 * We do a couple of physloads here because if we physunload all the
2249 	 * PFNs from a given range while only one segment exists in total,
2250 	 * a panic() is triggered.
2251 */
2252 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2253 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2254
2255 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
2256 #endif
2257
2258 #if VM_PHYSSEG_MAX == 1
2259 atf_tc_expect_signal(SIGABRT,
2260 "cannot uvm_page_physunload() the last segment");
2261 #endif
2262
2263 ATF_REQUIRE_EQ(VALID_AVAIL_START_PFN_1,
2264 uvm_physseg_get_avail_start(upm));
2265
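	/* Force-unload every available page one at a time; avail_start should advance after each call. */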
2266 	for (paddr_t i = VALID_AVAIL_START_PFN_1;
2267 i < VALID_AVAIL_END_PFN_1; i++) {
2268 ATF_CHECK_EQ(true,
2269 uvm_page_physunload_force(upm, VM_FREELIST_DEFAULT, &p));
2270 ATF_CHECK_EQ(i, atop(p));
2271
2272 		if (i + 1 < VALID_AVAIL_END_PFN_1)
2273 ATF_CHECK_EQ(i + 1, uvm_physseg_get_avail_start(upm));
2274 }
2275
2276 /*
2277 * Now we try to retrieve the segment, which has been removed
2278 * from the system through force unloading all the pages inside it.
2279 */
2280 upm = uvm_physseg_find(VALID_AVAIL_END_PFN_1 - 1, NULL);
2281
2282 /* It should no longer exist */
2283 	ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID, upm);
2284
2285 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
2286 }
2287
2288 ATF_TC(uvm_page_physunload_force_invalid);
2289 ATF_TC_HEAD(uvm_page_physunload_force_invalid, tc)
2290 {
2291 	atf_tc_set_md_var(tc, "descr", "Tests the invalid conditions for \
2292 	    uvm_page_physunload_force().");
2293 }
2294 ATF_TC_BODY(uvm_page_physunload_force_invalid, tc)
2295 {
2296 paddr_t p = 0;
2297
2298 uvm_physseg_t upm;
2299
2300 setup();
2301 	upm = uvm_page_physload(VALID_START_PFN_2, VALID_START_PFN_2 + 1,
2302 VALID_START_PFN_2, VALID_START_PFN_2, VM_FREELIST_DEFAULT);
2303
2304 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2305
2306 ATF_REQUIRE_EQ(0, uvmexp.npages);
2307
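	/* The avail range is empty (avail_start == avail_end), so even a forced unload has nothing to remove. */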
2308 ATF_CHECK_EQ(false,
2309 uvm_page_physunload_force(upm, VM_FREELIST_DEFAULT, &p));
2310
2311 ATF_CHECK_EQ(0, p);
2312 }
2313
2314 ATF_TP_ADD_TCS(tp)
2315 {
2316 #if defined(UVM_HOTPLUG)
2317 /* Internal */
2318 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_atboot_mismatch);
2319 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_atboot_overrun);
2320 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_sanity);
2321 ATF_TP_ADD_TC(tp, uvm_physseg_free_atboot_mismatch);
2322 ATF_TP_ADD_TC(tp, uvm_physseg_free_sanity);
2323 #if VM_PHYSSEG_MAX > 1
2324 ATF_TP_ADD_TC(tp, uvm_physseg_atboot_free_leak);
2325 #endif
2326 #endif /* UVM_HOTPLUG */
2327
2328 ATF_TP_ADD_TC(tp, uvm_physseg_plug);
2329 ATF_TP_ADD_TC(tp, uvm_physseg_unplug);
2330
2331 /* Exported */
2332 ATF_TP_ADD_TC(tp, uvm_physseg_init);
2333 ATF_TP_ADD_TC(tp, uvm_page_physload_preload);
2334 ATF_TP_ADD_TC(tp, uvm_page_physload_postboot);
2335 ATF_TP_ADD_TC(tp, uvm_physseg_handle_immutable);
2336 ATF_TP_ADD_TC(tp, uvm_physseg_seg_chomp_slab);
2337 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_from_slab);
2338 ATF_TP_ADD_TC(tp, uvm_physseg_init_seg);
2339 ATF_TP_ADD_TC(tp, uvm_physseg_get_start);
2340 ATF_TP_ADD_TC(tp, uvm_physseg_get_start_invalid);
2341 ATF_TP_ADD_TC(tp, uvm_physseg_get_end);
2342 ATF_TP_ADD_TC(tp, uvm_physseg_get_end_invalid);
2343 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_start);
2344 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_start_invalid);
2345 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_end);
2346 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_end_invalid);
2347 ATF_TP_ADD_TC(tp, uvm_physseg_get_next);
2348 ATF_TP_ADD_TC(tp, uvm_physseg_get_next_invalid);
2349 ATF_TP_ADD_TC(tp, uvm_physseg_get_prev);
2350 ATF_TP_ADD_TC(tp, uvm_physseg_get_prev_invalid);
2351 ATF_TP_ADD_TC(tp, uvm_physseg_get_first);
2352 ATF_TP_ADD_TC(tp, uvm_physseg_get_last);
2353 ATF_TP_ADD_TC(tp, uvm_physseg_valid);
2354 ATF_TP_ADD_TC(tp, uvm_physseg_valid_invalid);
2355 ATF_TP_ADD_TC(tp, uvm_physseg_get_highest);
2356 ATF_TP_ADD_TC(tp, uvm_physseg_get_free_list);
2357 ATF_TP_ADD_TC(tp, uvm_physseg_get_start_hint);
2358 ATF_TP_ADD_TC(tp, uvm_physseg_set_start_hint);
2359 ATF_TP_ADD_TC(tp, uvm_physseg_set_start_hint_invalid);
2360 ATF_TP_ADD_TC(tp, uvm_physseg_get_pg);
2361
2362 #ifdef __HAVE_PMAP_PHYSSEG
2363 ATF_TP_ADD_TC(tp, uvm_physseg_get_pmseg);
2364 #endif
2365 ATF_TP_ADD_TC(tp, vm_physseg_find);
2366 ATF_TP_ADD_TC(tp, vm_physseg_find_invalid);
2367
2368 ATF_TP_ADD_TC(tp, uvm_page_physunload_start);
2369 ATF_TP_ADD_TC(tp, uvm_page_physunload_end);
2370 ATF_TP_ADD_TC(tp, uvm_page_physunload_none);
2371 ATF_TP_ADD_TC(tp, uvm_page_physunload_delete_start);
2372 ATF_TP_ADD_TC(tp, uvm_page_physunload_delete_end);
2373 ATF_TP_ADD_TC(tp, uvm_page_physunload_invalid);
2374 ATF_TP_ADD_TC(tp, uvm_page_physunload_force);
2375 ATF_TP_ADD_TC(tp, uvm_page_physunload_force_invalid);
2376
2377 return atf_no_error();
2378 }
2379