cputlb.c: Use correct address space when looking up MemoryRegionSection

When looking up the MemoryRegionSection for the new TLB entry in
tlb_set_page_with_attrs(), use cpu_asidx_from_attrs() to determine
the correct address space index for the lookup, and pass it into
address_space_translate_for_iotlb().

Backports commit d7898cda81b6efa6b2d7a749882695cdcf280eaa from qemu
This commit is contained in:
Peter Maydell 2018-02-17 22:50:28 -05:00 committed by Lioncash
parent d23831f4dd
commit 8edd6ffdfd
No known key found for this signature in database
GPG key ID: 4E3C3CC1031BA9C7
20 changed files with 23 additions and 36 deletions

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_aarch64
#define cpu_address_space_init cpu_address_space_init_aarch64
#define cpu_exec_init_all cpu_exec_init_all_aarch64
-#define cpu_reload_memory_map cpu_reload_memory_map_aarch64
#define vm_start vm_start_aarch64
#define resume_all_vcpus resume_all_vcpus_aarch64
#define a15_l2ctlr_read a15_l2ctlr_read_aarch64

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_aarch64eb
#define cpu_address_space_init cpu_address_space_init_aarch64eb
#define cpu_exec_init_all cpu_exec_init_all_aarch64eb
-#define cpu_reload_memory_map cpu_reload_memory_map_aarch64eb
#define vm_start vm_start_aarch64eb
#define resume_all_vcpus resume_all_vcpus_aarch64eb
#define a15_l2ctlr_read a15_l2ctlr_read_aarch64eb

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_arm
#define cpu_address_space_init cpu_address_space_init_arm
#define cpu_exec_init_all cpu_exec_init_all_arm
-#define cpu_reload_memory_map cpu_reload_memory_map_arm
#define vm_start vm_start_arm
#define resume_all_vcpus resume_all_vcpus_arm
#define a15_l2ctlr_read a15_l2ctlr_read_arm

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_armeb
#define cpu_address_space_init cpu_address_space_init_armeb
#define cpu_exec_init_all cpu_exec_init_all_armeb
-#define cpu_reload_memory_map cpu_reload_memory_map_armeb
#define vm_start vm_start_armeb
#define resume_all_vcpus resume_all_vcpus_armeb
#define a15_l2ctlr_read a15_l2ctlr_read_armeb

View file

@ -35,15 +35,6 @@ void cpu_resume_from_signal(CPUState *cpu, void *puc)
siglongjmp(cpu->jmp_env, 1);
}
-void cpu_reload_memory_map(CPUState *cpu)
-{
-    /* The TLB is protected by the iothread lock. */
-    /* The CPU and TLB are protected by the iothread lock. */
-    AddressSpaceDispatch *d = cpu->as->dispatch;
-    cpu->memory_dispatch = d;
-    tlb_flush(cpu, 1);
-}
void cpu_loop_exit(CPUState *cpu)
{
cpu->current_tb = NULL;
@ -57,4 +48,4 @@ void cpu_loop_exit_restore(CPUState *cpu, uintptr_t pc)
}
cpu->current_tb = NULL;
siglongjmp(cpu->jmp_env, 1);
}
}

View file

@ -196,6 +196,7 @@ void tlb_set_page_with_attrs(CPUState *cpu, target_ulong vaddr,
CPUTLBEntry *te;
hwaddr iotlb, xlat, sz;
unsigned vidx = env->vtlb_index++ % CPU_VTLB_SIZE;
+    int asidx = cpu_asidx_from_attrs(cpu, attrs);
assert(size >= TARGET_PAGE_SIZE);
if (size != TARGET_PAGE_SIZE) {
@ -203,7 +204,7 @@ void tlb_set_page_with_attrs(CPUState *cpu, target_ulong vaddr,
}
sz = size;
-    section = address_space_translate_for_iotlb(cpu, paddr, &xlat, &sz);
+    section = address_space_translate_for_iotlb(cpu, asidx, paddr, &xlat, &sz);
assert(sz >= TARGET_PAGE_SIZE);
#if defined(DEBUG_TLB)

View file

@ -361,12 +361,13 @@ MemoryRegion *address_space_translate(AddressSpace *as, hwaddr addr,
}
MemoryRegionSection *
-address_space_translate_for_iotlb(CPUState *cpu, hwaddr addr,
+address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
hwaddr *xlat, hwaddr *plen)
{
MemoryRegionSection *section;
-    section = address_space_translate_internal(cpu->memory_dispatch,
-                                               addr, xlat, plen, false);
+    AddressSpaceDispatch *d = cpu->cpu_ases[asidx].memory_dispatch;
+    section = address_space_translate_internal(d, addr, xlat, plen, false);
assert(!section->mr->iommu_ops);
return section;
@ -1593,7 +1594,9 @@ static uint16_t dummy_section(PhysPageMap *map, AddressSpace *as,
MemoryRegion *iotlb_to_region(CPUState *cpu, hwaddr index)
{
-    MemoryRegionSection *sections = cpu->memory_dispatch->map.sections;
+    CPUAddressSpace *cpuas = &cpu->cpu_ases[0];
+    AddressSpaceDispatch *d = cpuas->memory_dispatch;
+    MemoryRegionSection *sections = d->map.sections;
return sections[index & ~TARGET_PAGE_MASK].mr;
}
@ -1644,12 +1647,20 @@ static void mem_commit(MemoryListener *listener)
static void tcg_commit(MemoryListener *listener)
{
struct uc_struct* uc = listener->address_space_filter->uc;
+    CPUAddressSpace *cpuas;
+    AddressSpaceDispatch *d;
/* since each CPU stores ram addresses in its TLB cache, we must
reset the modified entries */
-    /* XXX: slow ! */
-    cpu_reload_memory_map(uc->cpu);
+    cpuas = container_of(listener, CPUAddressSpace, tcg_as_listener);
+    /* The CPU and TLB are protected by the iothread lock.
+     * We reload the dispatch pointer now because cpu_reloading_memory_map()
+     * may have split the RCU critical section.
+     */
+    d = cpuas->as->dispatch;
+    cpuas->memory_dispatch = d;
+    tlb_flush(cpuas->cpu, 1);
}
void address_space_init_dispatch(AddressSpace *as)

View file

@ -55,7 +55,6 @@ symbols = (
'memory_register_types',
'cpu_address_space_init',
'cpu_exec_init_all',
-    'cpu_reload_memory_map',
'vm_start',
'resume_all_vcpus',
'a15_l2ctlr_read',

View file

@ -33,8 +33,8 @@ void tlb_set_dirty(CPUState *env, target_ulong vaddr);
void tb_flush_jmp_cache(CPUState *cpu, target_ulong addr);
MemoryRegionSection *
-address_space_translate_for_iotlb(CPUState *cpu, hwaddr addr, hwaddr *xlat,
-                                  hwaddr *plen);
+address_space_translate_for_iotlb(CPUState *cpu, int asidx, hwaddr addr,
+                                  hwaddr *xlat, hwaddr *plen);
hwaddr memory_region_section_get_iotlb(CPUState *cpu,
MemoryRegionSection *section,
target_ulong vaddr,

View file

@ -82,7 +82,6 @@ void QEMU_NORETURN cpu_loop_exit(CPUState *cpu);
void QEMU_NORETURN cpu_loop_exit_restore(CPUState *cpu, uintptr_t pc);
#if !defined(CONFIG_USER_ONLY)
-void cpu_reload_memory_map(CPUState *cpu);
/**
* cpu_address_space_init:
* @cpu: CPU to add this address space to

View file

@ -264,7 +264,6 @@ struct CPUState {
CPUAddressSpace *cpu_ases;
int num_ases;
AddressSpace *as;
-    struct AddressSpaceDispatch *memory_dispatch;
void *env_ptr; /* CPUArchState */
struct TranslationBlock *current_tb;

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_m68k
#define cpu_address_space_init cpu_address_space_init_m68k
#define cpu_exec_init_all cpu_exec_init_all_m68k
-#define cpu_reload_memory_map cpu_reload_memory_map_m68k
#define vm_start vm_start_m68k
#define resume_all_vcpus resume_all_vcpus_m68k
#define a15_l2ctlr_read a15_l2ctlr_read_m68k

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_mips
#define cpu_address_space_init cpu_address_space_init_mips
#define cpu_exec_init_all cpu_exec_init_all_mips
-#define cpu_reload_memory_map cpu_reload_memory_map_mips
#define vm_start vm_start_mips
#define resume_all_vcpus resume_all_vcpus_mips
#define a15_l2ctlr_read a15_l2ctlr_read_mips

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_mips64
#define cpu_address_space_init cpu_address_space_init_mips64
#define cpu_exec_init_all cpu_exec_init_all_mips64
-#define cpu_reload_memory_map cpu_reload_memory_map_mips64
#define vm_start vm_start_mips64
#define resume_all_vcpus resume_all_vcpus_mips64
#define a15_l2ctlr_read a15_l2ctlr_read_mips64

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_mips64el
#define cpu_address_space_init cpu_address_space_init_mips64el
#define cpu_exec_init_all cpu_exec_init_all_mips64el
-#define cpu_reload_memory_map cpu_reload_memory_map_mips64el
#define vm_start vm_start_mips64el
#define resume_all_vcpus resume_all_vcpus_mips64el
#define a15_l2ctlr_read a15_l2ctlr_read_mips64el

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_mipsel
#define cpu_address_space_init cpu_address_space_init_mipsel
#define cpu_exec_init_all cpu_exec_init_all_mipsel
-#define cpu_reload_memory_map cpu_reload_memory_map_mipsel
#define vm_start vm_start_mipsel
#define resume_all_vcpus resume_all_vcpus_mipsel
#define a15_l2ctlr_read a15_l2ctlr_read_mipsel

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_powerpc
#define cpu_address_space_init cpu_address_space_init_powerpc
#define cpu_exec_init_all cpu_exec_init_all_powerpc
-#define cpu_reload_memory_map cpu_reload_memory_map_powerpc
#define vm_start vm_start_powerpc
#define resume_all_vcpus resume_all_vcpus_powerpc
#define a15_l2ctlr_read a15_l2ctlr_read_powerpc

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_sparc
#define cpu_address_space_init cpu_address_space_init_sparc
#define cpu_exec_init_all cpu_exec_init_all_sparc
-#define cpu_reload_memory_map cpu_reload_memory_map_sparc
#define vm_start vm_start_sparc
#define resume_all_vcpus resume_all_vcpus_sparc
#define a15_l2ctlr_read a15_l2ctlr_read_sparc

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_sparc64
#define cpu_address_space_init cpu_address_space_init_sparc64
#define cpu_exec_init_all cpu_exec_init_all_sparc64
-#define cpu_reload_memory_map cpu_reload_memory_map_sparc64
#define vm_start vm_start_sparc64
#define resume_all_vcpus resume_all_vcpus_sparc64
#define a15_l2ctlr_read a15_l2ctlr_read_sparc64

View file

@ -49,7 +49,6 @@
#define memory_register_types memory_register_types_x86_64
#define cpu_address_space_init cpu_address_space_init_x86_64
#define cpu_exec_init_all cpu_exec_init_all_x86_64
-#define cpu_reload_memory_map cpu_reload_memory_map_x86_64
#define vm_start vm_start_x86_64
#define resume_all_vcpus resume_all_vcpus_x86_64
#define a15_l2ctlr_read a15_l2ctlr_read_x86_64