
Lines Matching refs:mach

56 nvmm_vcpu_dump(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu)
67 ret = nvmm_vcpu_getstate(mach, vcpu, NVMM_X64_STATE_ALL);
123 x86_gva_to_gpa_32bit(struct nvmm_machine *mach, uint64_t cr3,
136 if (nvmm_gpa_to_hva(mach, L2gpa, &L2hva, &pageprot) == -1)
156 if (nvmm_gpa_to_hva(mach, L1gpa, &L1hva, &pageprot) == -1)
196 x86_gva_to_gpa_32bit_pae(struct nvmm_machine *mach, uint64_t cr3,
209 if (nvmm_gpa_to_hva(mach, L3gpa, &L3hva, &pageprot) == -1)
222 if (nvmm_gpa_to_hva(mach, L2gpa, &L2hva, &pageprot) == -1)
242 if (nvmm_gpa_to_hva(mach, L1gpa, &L1hva, &pageprot) == -1)
296 x86_gva_to_gpa_64bit(struct nvmm_machine *mach, uint64_t cr3,
312 if (nvmm_gpa_to_hva(mach, L4gpa, &L4hva, &pageprot) == -1)
329 if (nvmm_gpa_to_hva(mach, L3gpa, &L3hva, &pageprot) == -1)
349 if (nvmm_gpa_to_hva(mach, L2gpa, &L2hva, &pageprot) == -1)
369 if (nvmm_gpa_to_hva(mach, L1gpa, &L1hva, &pageprot) == -1)
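The three x86_gva_to_gpa_* walkers referenced above differ only in how many page-table levels they traverse and in how they slice the guest virtual address into table indices before each nvmm_gpa_to_hva() lookup. As a point of reference, here is a minimal sketch of the index extraction for the 4-level (long mode) walk; the macro and function names are illustrative, not the ones used in libnvmm.

#include <stdint.h>

/*
 * Illustrative only: standard x86-64 4-level paging splits a GVA into
 * four 9-bit table indices plus a 12-bit page offset. The L4..L1
 * variables in the matches above correspond to these levels, each
 * table being read through nvmm_gpa_to_hva().
 */
#define PTE_IDX(gva, level)     (((gva) >> (12 + 9 * ((level) - 1))) & 0x1FF)

static inline void
gva_64bit_indices(uint64_t gva, uint64_t idx[4])
{
        idx[3] = PTE_IDX(gva, 4);       /* L4: PML4 index, bits 47..39 */
        idx[2] = PTE_IDX(gva, 3);       /* L3: PDPT index, bits 38..30 */
        idx[1] = PTE_IDX(gva, 2);       /* L2: PD index,   bits 29..21 */
        idx[0] = PTE_IDX(gva, 1);       /* L1: PT index,   bits 20..12 */
}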
389 x86_gva_to_gpa(struct nvmm_machine *mach, struct nvmm_x64_state *state,
414 ret = x86_gva_to_gpa_64bit(mach, cr3, gva, gpa, prot);
417 ret = x86_gva_to_gpa_32bit_pae(mach, cr3, gva, gpa, prot);
420 ret = x86_gva_to_gpa_32bit(mach, cr3, gva, gpa, has_pse, prot);
435 nvmm_gva_to_gpa(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
441 ret = nvmm_vcpu_getstate(mach, vcpu,
446 return x86_gva_to_gpa(mach, state, gva, gpa, prot);
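For code outside the library, nvmm_gva_to_gpa() and nvmm_gpa_to_hva() are the public entry points into the translation machinery matched above. A minimal sketch of chaining them to read guest memory, assuming 4 KiB pages, page-aligned arguments as the API expects, and an access that does not cross a page boundary (libnvmm's own read_guest_memory() handles the crossing case by recursing on the remainder, as the later matches show):

#include <string.h>
#include <nvmm.h>

static int
peek_guest(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
    gvaddr_t gva, void *buf, size_t len)
{
        gvaddr_t gva_page = gva & ~(gvaddr_t)0xFFF;     /* assume 4 KiB pages */
        size_t off = gva & 0xFFF;
        gpaddr_t gpa;
        uintptr_t hva;
        nvmm_prot_t prot;

        /* GVA -> GPA, using the VCPU's current paging mode. */
        if (nvmm_gva_to_gpa(mach, vcpu, gva_page, &gpa, &prot) == -1)
                return -1;
        /* GPA -> HVA, valid only for memory the VMM has mapped. */
        if (nvmm_gpa_to_hva(mach, gpa, &hva, &prot) == -1)
                return -1;
        memcpy(buf, (const void *)(hva + off), len);
        return 0;
}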
557 read_guest_memory(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
568 ret = x86_gva_to_gpa(mach, state, gva, &gpa, &prot);
584 ret = nvmm_gpa_to_hva(mach, gpa, &hva, &prot);
588 mem.mach = mach;
604 ret = read_guest_memory(mach, vcpu, gva + size,
614 write_guest_memory(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
625 ret = x86_gva_to_gpa(mach, state, gva, &gpa, &prot);
641 ret = nvmm_gpa_to_hva(mach, gpa, &hva, &prot);
645 mem.mach = mach;
661 ret = write_guest_memory(mach, vcpu, gva + size,
677 assist_io_batch(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
691 ret = read_guest_memory(mach, vcpu, gva, iobuf, iosize);
702 ret = write_guest_memory(mach, vcpu, gva, iobuf, iosize);
711 nvmm_assist_io(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu)
729 io.mach = mach;
736 ret = nvmm_vcpu_getstate(mach, vcpu,
773 seg = fetch_segment(mach, vcpu);
791 iocnt = assist_io_batch(mach, vcpu, &io, gva, cnt);
802 ret = read_guest_memory(mach, vcpu, gva, io.data,
819 ret = write_guest_memory(mach, vcpu, gva, io.data,
846 ret = nvmm_vcpu_setstate(mach, vcpu, NVMM_X64_STATE_GPRS);
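nvmm_assist_io() is intended to be called from the VMM's run loop when a VCPU exits on a port I/O access: as the matches show, it fetches the relevant state (nvmm_vcpu_getstate), emulates the IN/OUT[S] access via read_guest_memory()/write_guest_memory() and the registered I/O callback, then writes the GPRs back (nvmm_vcpu_setstate). A sketch of the call site follows; the exit-reason constant and the vcpu->exit layout are assumptions based on the current libnvmm API and may differ between NVMM versions.

static int
handle_exit(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu)
{
        switch (vcpu->exit->reason) {
        case NVMM_VCPU_EXIT_IO:
                /* Let libnvmm emulate the I/O instruction. */
                if (nvmm_assist_io(mach, vcpu) == -1)
                        return -1;
                break;
        default:
                break;
        }
        return 0;
}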
3100 fetch_segment(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu)
3119 ret = read_guest_memory(mach, vcpu, gva, inst_bytes, fetchsize);
3158 fetch_instruction(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
3177 ret = read_guest_memory(mach, vcpu, gva, exit->u.mem.inst_bytes,
3188 assist_mem_movs(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
3205 ret = read_guest_memory(mach, vcpu, gva, data, size);
3213 ret = write_guest_memory(mach, vcpu, gva, data, size);
3234 assist_mem_cmps(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
3251 ret = read_guest_memory(mach, vcpu, gva, data1, size);
3259 ret = read_guest_memory(mach, vcpu, gva, data2, size);
3288 assist_mem_single(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu,
3299 mem.mach = mach;
3412 nvmm_assist_mem(struct nvmm_machine *mach, struct nvmm_vcpu *vcpu)
3425 ret = nvmm_vcpu_getstate(mach, vcpu,
3436 ret = fetch_instruction(mach, vcpu, exit);
3457 ret = assist_mem_movs(mach, vcpu, &instr);
3460 ret = assist_mem_cmps(mach, vcpu, &instr);
3462 ret = assist_mem_single(mach, vcpu, &instr);
3490 ret = nvmm_vcpu_setstate(mach, vcpu, NVMM_X64_STATE_GPRS);
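nvmm_assist_mem() plays the same role for MMIO exits: it fetches and decodes the faulting instruction (fetch_instruction(), then the MOVS, CMPS, or single-operand paths above) and hands each access to the VMM's memory callback, which is why the descriptor is seeded with mem.mach = mach. A hedged sketch of the callback side follows; struct nvmm_assist_callbacks, NVMM_VCPU_CONF_CALLBACKS and the nvmm_mem field names are assumptions based on the current libnvmm API, not taken from the matches above.

/*
 * Sketch: the mem callback receives a struct nvmm_mem describing one
 * emulated access (machine, GPA, direction, size, data buffer).
 */
static void
mmio_callback(struct nvmm_mem *mem)
{
        /* Forward the access to the VMM's device emulation here. */
}

static const struct nvmm_assist_callbacks callbacks = {
        .io = NULL,             /* port I/O callback, omitted here */
        .mem = mmio_callback,
};

/*
 * Registration, typically once per VCPU after nvmm_vcpu_create():
 *      nvmm_vcpu_configure(mach, vcpu, NVMM_VCPU_CONF_CALLBACKS, &callbacks);
 * Then, on a memory-assist exit in the run loop:
 *      nvmm_assist_mem(mach, vcpu);
 */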