diff --git a/proof/invariant-abstract/X64/ArchAcc_AI.thy b/proof/invariant-abstract/X64/ArchAcc_AI.thy index d874689b55..22d0edcfab 100644 --- a/proof/invariant-abstract/X64/ArchAcc_AI.thy +++ b/proof/invariant-abstract/X64/ArchAcc_AI.thy @@ -31,9 +31,6 @@ bundle unfold_objects_asm = kernel_object.split_asm[split] arch_kernel_obj.split_asm[split] -definition - "valid_asid asid s \ x64_asid_map (arch_state s) asid \ None" - definition "update_object ptr obj = do kobj <- get_object ptr; @@ -1549,52 +1546,6 @@ lemma image_in_rtrancl_image: definition "set_diff A B \ A - B \ (B - A)" -lemma set_pt_asid_map [wp]: - "\valid_asid_map and - (\s. (\a aobj ptr. aa_type nobj = AASIDPool \ ((a, ptr) \ graph_of (x64_asid_map (arch_state s)) \ ko_at (ArchObj aobj) p s) - \ ([VSRef (ucast (asid_high_bits_of a)) None], p) \ vs_asid_refs (x64_asid_table (arch_state s)) - \ (VSRef (a && mask asid_low_bits) (Some AASIDPool), ptr) \ set_diff (vs_refs (ArchObj nobj)) (vs_refs (ArchObj aobj)))) - \ update_object p (ArchObj nobj) \\_. valid_asid_map\" - apply (simp add: valid_asid_map_def vspace_at_asid_def update_object_def set_object_def) - apply (wp get_object_wp) - apply clarsimp - apply (drule(1) bspec) - apply clarsimp - apply (rule decrease_predictI) - apply force - apply (simp add: vs_lookup_def) - apply (rule image_in_rtrancl_image) - apply (clarsimp simp: vs_lookup_def) - apply (erule vs_lookup1_wellformed.lookupE) - apply (clarsimp simp: vs_lookup1_def vs_asid_refs_def) - apply (clarsimp simp: Image_def vs_asid_refs_def dest!: vs_lookup1D) - apply (rule bexI[rotated]) - apply simp - apply clarsimp - apply (case_tac "pa \ p") - apply (rule vs_lookup1I[rotated -1]) - apply simp - apply (simp add: obj_at_def) - apply (clarsimp simp: lookup_leaf_def obj_at_def - dest!: vs_lookup1_wellformed.lookup_rtrancl_stepD vs_lookup1D) - apply (clarsimp simp: lookup_leaf_def obj_at_def - dest!: vs_lookup1_wellformed.lookup_rtrancl_stepD vs_lookup1D) - apply (clarsimp simp: vs_lookup1_def) - apply (drule_tac x = a in spec) - apply (clarsimp simp: obj_at_def dest!:a_type_is_aobj) - apply (rule ccontr) - apply (elim allE) - apply (erule impE) - apply (rule context_conjI) - apply (rule ccontr) - apply (case_tac nobj) - apply ((fastforce simp: aa_type_simps a_type_def vs_refs_def image_def - split: kernel_object.split_asm if_split_asm arch_kernel_obj.split_asm)+)[6] - apply fastforce - apply (clarsimp simp: image_def graph_of_def) - apply (fastforce simp: set_diff_def image_def) - done - declare graph_of_None_update[simp] declare graph_of_Some_update[simp] @@ -1887,9 +1838,6 @@ lemma update_object_state_hyp_refs[wp]: lemma update_object_invs[wp]: "\ \s. invs s \ valid_table_caps_aobj (caps_of_state s) (arch_state s) (ArchObj obj) ptr \ (aa_type obj \ AASIDPool \ ptr \ global_refs s) - \ (\a aobj p. (aa_type obj = AASIDPool \ ((a, p) \ graph_of (x64_asid_map (arch_state s)) \ ko_at (ArchObj aobj) ptr s)) - \ ([VSRef (ucast (asid_high_bits_of a)) None], ptr) \ vs_asid_refs (x64_asid_table (arch_state s)) - \ (VSRef (a && mask asid_low_bits) (Some AASIDPool), p) \ set_diff (vs_refs (ArchObj obj)) (vs_refs (ArchObj aobj))) \ obj_at (\ko. case obj of PageMapL4 pm \ \pm'. ko = ArchObj (PageMapL4 pm') \ (\x\kernel_mapping_slots. pm x = pm' x) | _ \ True) ptr s \ valid_kernel_mappings_if_pm (set (second_level_tables (arch_state s))) (ArchObj obj) \ valid_global_objs_upd ptr obj (arch_state s) @@ -1921,8 +1869,8 @@ lemma update_object_invs[wp]: (\a b cap. 
caps_of_state s (a, b) = Some cap \ p \ obj_refs cap \ vs_cap_ref cap = Some rs))\ update_object ptr (ArchObj obj) \ \_. invs \" - apply (clarsimp simp: invs_def valid_state_def valid_pspace_def) - apply_trace (wp valid_irq_node_typ valid_irq_handlers_lift update_aobj_valid_global_vspace_mappings) + apply (clarsimp simp: invs_def valid_state_def valid_pspace_def valid_asid_map_def) + apply (wp valid_irq_node_typ valid_irq_handlers_lift update_aobj_valid_global_vspace_mappings) apply (clarsimp simp: valid_arch_state_def) done @@ -2179,17 +2127,6 @@ lemma vs_lookup_pages_arch_update: apply simp done - -lemma vs_lookup_asid_map [iff]: - "vs_lookup (s\arch_state := x64_asid_map_update f (arch_state s)\) = vs_lookup s" - by (simp add: vs_lookup_arch_update) - - -lemma vs_lookup_pages_asid_map[iff]: - "vs_lookup_pages (s\arch_state := x64_asid_map_update f (arch_state s)\) = - vs_lookup_pages s" - by (simp add: vs_lookup_pages_arch_update) - lemma valid_vspace_objs_arch_update: "x64_asid_table (f (arch_state s)) = x64_asid_table (arch_state s) \ valid_vspace_objs (arch_state_update f s) = valid_vspace_objs s" @@ -2828,48 +2765,6 @@ crunch v_ker_map[wp]: set_asid_pool "valid_kernel_mappings" (ignore: set_object wp: set_object_v_ker_map crunch_wps) -lemma set_asid_pool_restrict_asid_map: - "\valid_asid_map and ko_at (ArchObj (ASIDPool ap)) p and - (\s. \asid. asid \ mask asid_bits \ ucast asid \ S \ - x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some p \ - x64_asid_map (arch_state s) asid = None)\ - set_asid_pool p (ap |` S) \\_. valid_asid_map\" - apply (simp add: set_asid_pool_def valid_asid_map_def set_object_def update_object_def) - apply (wp get_object_wp) - apply (clarsimp split: kernel_object.splits arch_kernel_obj.splits - simp del: fun_upd_apply) - apply (drule(1) bspec) - apply (clarsimp simp: vspace_at_asid_def obj_at_def graph_of_def) - apply (drule subsetD, erule domI) - apply simp - apply (drule spec, drule(1) mp) - apply simp - apply (erule vs_lookupE) - apply (rule vs_lookupI, simp) - apply (clarsimp simp: vs_asid_refs_def graph_of_def) - apply (drule rtranclD) - apply (erule disjE, clarsimp) - apply clarsimp - apply (drule tranclD) - apply clarsimp - apply (rule r_into_rtrancl) - apply (drule vs_lookup1D) - apply clarsimp - apply (subst vs_lookup1_def) - apply (clarsimp simp: obj_at_def) - apply (erule rtranclE) - apply (clarsimp simp: vs_refs_def graph_of_def) - apply (rule image_eqI[where x="(_, _)"]) - apply (simp add: split_def) - apply (clarsimp simp: restrict_map_def) - apply (drule ucast_up_inj, simp) - apply (simp add: mask_asid_low_bits_ucast_ucast) - apply clarsimp - apply (drule vs_lookup1_trans_is_append) - apply clarsimp - apply (drule vs_lookup1D) - by clarsimp - lemma set_asid_pool_cur [wp]: "\\s. P (cur_thread s)\ set_asid_pool p a \\_ s. P (cur_thread s)\" unfolding set_asid_pool_def by (wpsimp wp: get_object_wp) @@ -3006,24 +2901,18 @@ lemma set_asid_pool_zombies_state_hyp_refs [wp]: by (simp add: update_object_state_hyp_refs) lemma set_asid_pool_invs_restrict: - "\invs and ko_at (ArchObj (ASIDPool ap)) p and - (\s. \asid. asid \ mask asid_bits \ ucast asid \ S \ - x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some p \ - x64_asid_map (arch_state s) asid = None)\ + "\invs and ko_at (ArchObj (ASIDPool ap)) p \ set_asid_pool p (ap |` S) \\_. 
invs\"
-  apply (simp add: invs_def valid_state_def valid_pspace_def
+  apply (simp add: invs_def valid_state_def valid_pspace_def valid_asid_map_def
                    valid_arch_caps_def del: set_asid_pool_simpler_def)
-  apply (wp valid_irq_node_typ set_asid_pool_typ_at
+  apply (wp valid_irq_node_typ
             set_asid_pool_vspace_objs_unmap valid_irq_handlers_lift
-            set_asid_pool_vs_lookup_unmap set_asid_pool_restrict_asid_map
+            set_asid_pool_vs_lookup_unmap
          | simp del: set_asid_pool_simpler_def)+
   done
 
 lemma set_asid_pool_invs_unmap:
-  "\invs and ko_at (ArchObj (ASIDPool ap)) p and
-    (\s. \asid. asid \ mask asid_bits \ ucast asid = x \
-                x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some p \
-                x64_asid_map (arch_state s) asid = None)\
+  "\invs and ko_at (ArchObj (ASIDPool ap)) p\
       set_asid_pool p (ap(x := None)) \\_. invs\"
   using set_asid_pool_invs_restrict[where S="- {x}"]
   by (simp add: restrict_map_def fun_upd_def if_flip)
diff --git a/proof/invariant-abstract/X64/ArchArch_AI.thy b/proof/invariant-abstract/X64/ArchArch_AI.thy
index 32d9be2c60..15e0759fc3 100644
--- a/proof/invariant-abstract/X64/ArchArch_AI.thy
+++ b/proof/invariant-abstract/X64/ArchArch_AI.thy
@@ -382,24 +382,7 @@ lemma valid_arch_caps:
 
 lemma valid_asid_map':
   "valid_asid_map s \ valid_asid_map s'"
-  using empty
-  apply (clarsimp simp: valid_asid_map_def s'_def)
-  apply (drule bspec, blast)
-  apply (clarsimp simp: vspace_at_asid_def)
-  apply (drule vs_lookup_2ConsD)
-  apply clarsimp
-  apply (erule vs_lookup_atE)
-  apply (drule vs_lookup1D)
-  apply clarsimp
-  apply (rule vs_lookupI[rotated])
-  apply (rule r_into_rtrancl)
-  apply (rule vs_lookup1I)
-  apply (fastforce simp: obj_at_def)
-  apply assumption
-  apply simp
-  apply (clarsimp simp: vs_asid_refs_def graph_of_def)
-  apply fastforce
-  done
+  by (clarsimp simp: valid_asid_map_def)
 
 end
 
diff --git a/proof/invariant-abstract/X64/ArchDetype_AI.thy b/proof/invariant-abstract/X64/ArchDetype_AI.thy
index db42b03ab5..f95db69ad4 100644
--- a/proof/invariant-abstract/X64/ArchDetype_AI.thy
+++ b/proof/invariant-abstract/X64/ArchDetype_AI.thy
@@ -397,16 +397,7 @@ lemma valid_kernel_mappings_detype[detype_invs_proofs]:
   qed
 
 lemma valid_asid_map_detype[detype_invs_proofs]: "valid_asid_map (detype (untyped_range cap) s)"
-proof -
-  have "valid_asid_map s"
-    using invs by (simp add: invs_def valid_state_def)
-  thus ?thesis
-    apply (clarsimp simp: valid_asid_map_def arch_state_det)
-    apply (drule bspec)
-    apply (blast)
-    apply (clarsimp simp: vspace_at_asid_def vs_lookup)
-    done
-  qed
+  by (simp add: valid_asid_map_def)
 
 lemma equal_kernel_mappings_detype[detype_invs_proofs]:
   "equal_kernel_mappings (detype (untyped_range cap) s)"
diff --git a/proof/invariant-abstract/X64/ArchFinalise_AI.thy b/proof/invariant-abstract/X64/ArchFinalise_AI.thy
index 1b04f5b822..8a5b46749a 100644
--- a/proof/invariant-abstract/X64/ArchFinalise_AI.thy
+++ b/proof/invariant-abstract/X64/ArchFinalise_AI.thy
@@ -75,51 +75,26 @@ lemmas nat_to_cref_unat_of_bl = nat_to_cref_unat_of_bl' [OF _ refl]
 
 lemma invs_x64_asid_table_unmap:
   "invs s \ is_aligned base asid_low_bits \ base \ mask asid_bits
-       \ (\x\set [0.e.2 ^ asid_low_bits - 1]. 
x64_asid_map (arch_state s) (base + x) = None) \ tab = x64_asid_table (arch_state s) \ invs (s\arch_state := arch_state s\x64_asid_table := tab(asid_high_bits_of base := None)\\)" apply (clarsimp simp: invs_def valid_state_def valid_arch_caps_def) - apply (strengthen valid_asid_map_unmap valid_vspace_objs_unmap_strg + apply (strengthen valid_vspace_objs_unmap_strg valid_vs_lookup_unmap_strg valid_arch_state_unmap_strg) apply (simp add: valid_irq_node_def valid_kernel_mappings_def - valid_global_objs_arch_update) + valid_global_objs_arch_update valid_asid_map_def) apply (simp add: valid_table_caps_def valid_machine_state_def second_level_tables_def) done -crunch asid_map[wp]: do_machine_op "valid_asid_map" - lemma delete_asid_pool_invs[wp]: "\invs and K (base \ mask asid_bits)\ delete_asid_pool base pptr \\rv. invs\" apply (simp add: delete_asid_pool_def) apply wp - apply (strengthen invs_x64_asid_table_unmap) - apply simp - apply (rule hoare_vcg_conj_lift, - (rule mapM_invalidate[where ptr=pptr, simplified])?, - ((wp mapM_wp' | simp add: if_apply_def2)+)[1])+ - apply wp+ + apply (strengthen invs_x64_asid_table_unmap) + apply (wpsimp wp: mapM_wp', simp) + apply (wp+)[3] apply (clarsimp simp: is_aligned_mask[symmetric]) - apply (rule conjI) - apply (rule vs_lookupI) - apply (erule vs_asid_refsI) - apply simp - apply clarsimp - done - -lemma invalidate_asid_entry_invalidates: - "\valid_asid_map and valid_arch_state and K (asid \ mask asid_bits) and - (\s. x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some ap)\ - invalidate_asid_entry asid pm - \\rv s. \asida. asida \ mask asid_bits \ - ucast asida = (ucast asid :: 9 word) \ - x64_asid_table (arch_state s) (asid_high_bits_of asida) = - Some ap \ - x64_asid_map (arch_state s) asida = None\" - apply (simp add: invalidate_asid_entry_def) - apply (wp invalidate_asid_invalidates) - apply clarsimp done lemma delete_asid_invs[wp]: @@ -127,10 +102,7 @@ lemma delete_asid_invs[wp]: delete_asid asid pd \\rv. invs\" apply (simp add: delete_asid_def cong: option.case_cong del: set_arch_obj_simps(5)) - apply (wp set_asid_pool_invs_unmap | wpc)+ - apply (wp invalidate_asid_entry_invalidates gets_wp hoare_vcg_const_imp_lift hoare_vcg_all_lift - hoare_vcg_imp_lift invalidate_asid_asid_map_None_inv)+ - apply (clarsimp simp del: fun_upd_apply simp: invs_valid_asid_map) + apply (wpsimp wp: set_asid_pool_invs_unmap) done crunch vs_lookup[wp]: set_vm_root "\s. P (vs_lookup s)" @@ -1564,37 +1536,6 @@ lemma set_asid_pool_empty_table_lookup: apply (simp add: vs_cap_ref_def) done -lemma set_asid_pool_empty_valid_asid_map: - "\\s. valid_asid_map s \ asid_pool_at p s - \ (\asid'. \ ([VSRef asid' None] \ p) s) - \ (\p'. \ ([VSRef (ucast (asid_high_bits_of base)) None] \ p') s)\ - set_asid_pool p empty - \\rv s. 
valid_asid_map (s\arch_state := arch_state s\x64_asid_table := - x64_asid_table (arch_state s)(asid_high_bits_of base \ p)\\)\" - apply (simp add: set_asid_pool_def set_object_def update_object_def) - apply (wp get_object_wp) - apply (clarsimp simp: valid_asid_map_def vspace_at_asid_def - dest!: graph_ofD - split: Structures_A.kernel_object.split_asm - arch_kernel_obj.split_asm ) - apply (drule bspec, erule graph_ofI) - apply (clarsimp dest!: vs_lookup_2ConsD vs_lookup1D) - apply (case_tac "p = pa") - apply simp - apply (clarsimp elim!: vs_lookup_atE) - apply (rule vs_lookupI[rotated]) - apply (rule r_into_rtrancl) - apply (rule_tac p=pa in vs_lookup1I) - apply (simp add: obj_at_def) - apply assumption - apply simp - apply (rule vs_asid_refsI) - apply (clarsimp simp: a_type_simps) - apply (drule vs_asid_refsI)+ - apply (drule vs_lookupI, rule rtrancl_refl) - apply simp - done - lemma set_asid_pool_invs_table: "\\s. invs s \ asid_pool_at p s \ (\p'. caps_of_state s p' = Some (ArchObjectCap (ASIDPoolCap p base))) @@ -1603,28 +1544,23 @@ lemma set_asid_pool_invs_table: set_asid_pool p empty \\x s. invs (s\arch_state := arch_state s\x64_asid_table := x64_asid_table (arch_state s)(asid_high_bits_of base \ p)\\)\" - apply (simp add: invs_def valid_state_def valid_pspace_def valid_arch_caps_def del:set_asid_pool_simpler_def) - apply (rule hoare_pre) - apply (wp valid_irq_node_typ set_asid_pool_typ_at - set_asid_pool_empty_table_objs - valid_irq_handlers_lift set_asid_pool_empty_table_lookup - set_asid_pool_empty_valid_asid_map + apply (simp add: invs_def valid_state_def valid_pspace_def valid_arch_caps_def valid_asid_map_def + del: set_asid_pool_simpler_def) + apply (wp valid_irq_node_typ set_asid_pool_typ_at + set_asid_pool_empty_table_objs + valid_irq_handlers_lift set_asid_pool_empty_table_lookup | strengthen valid_arch_state_table_strg)+ - apply (clarsimp simp: conj_comms) - apply (rule context_conjI) - apply clarsimp - apply (frule valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI], clarsimp) - apply clarsimp - apply (drule obj_ref_elemD) - apply (frule(2) unique_table_refsD, - unfold obj_refs.simps aobj_ref.simps option.simps, - assumption) - apply (clarsimp simp:vs_cap_ref_def table_cap_ref_def - split:cap.split_asm arch_cap.split_asm) apply clarsimp apply (drule vs_asid_refsI) apply (drule vs_lookupI, rule rtrancl_refl) - apply simp + apply (frule valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI], clarsimp) + apply clarsimp + apply (drule obj_ref_elemD) + apply (frule(2) unique_table_refsD, + unfold obj_refs.simps aobj_ref.simps option.simps, + assumption) + apply (clarsimp simp: vs_cap_ref_def table_cap_ref_def + split: cap.split_asm arch_cap.split_asm) done lemma delete_asid_pool_unmapped2: diff --git a/proof/invariant-abstract/X64/ArchInvariants_AI.thy b/proof/invariant-abstract/X64/ArchInvariants_AI.thy index 42f200f1dd..f8ce9a2a89 100644 --- a/proof/invariant-abstract/X64/ArchInvariants_AI.thy +++ b/proof/invariant-abstract/X64/ArchInvariants_AI.thy @@ -1219,10 +1219,7 @@ where definition valid_asid_map :: "'z::state_ext state \ bool" where - "valid_asid_map \ - \s. dom (x64_asid_map (arch_state s)) \ {0 .. mask asid_bits} \ - (\(asid, pd) \ graph_of (x64_asid_map (arch_state s)). - vspace_at_asid asid pd s \ asid \ 0)" + "valid_asid_map \ \s. 
True"
 
 section "Lemmas"
 
diff --git a/proof/invariant-abstract/X64/ArchKHeap_AI.thy b/proof/invariant-abstract/X64/ArchKHeap_AI.thy
index ec51bb4e98..2b564050aa 100644
--- a/proof/invariant-abstract/X64/ArchKHeap_AI.thy
+++ b/proof/invariant-abstract/X64/ArchKHeap_AI.thy
@@ -486,10 +486,7 @@ lemma valid_global_objs_lift_weak:
 
 lemma valid_asid_map_lift:
   "\valid_asid_map\ f \\rv. valid_asid_map\"
-  apply (simp add: valid_asid_map_def)
-  apply (rule hoare_lift_Pf[where f="arch_state", OF _ arch])
-  apply (simp add: vspace_at_asid_def)
-  by (rule vs_lookup_vspace_obj_at_lift[OF aobj_at arch])
+  by (wpsimp simp: valid_asid_map_def)
 
 lemma valid_kernel_mappings_lift:
   "\valid_kernel_mappings\ f \\rv. valid_kernel_mappings\"
@@ -617,22 +614,6 @@ lemmas set_object_v_ker_map
     = set_object_valid_kernel_mappings
             [unfolded valid_kernel_mappings_if_pm_def]
 
-lemma set_object_asid_map:
-  "\valid_asid_map and
-    obj_at (\ko'. vs_refs ko' \ vs_refs ko) p\
-   set_object p ko
-   \\_. valid_asid_map\"
-  apply (simp add: valid_asid_map_def set_object_def)
-  apply wp
-  apply (clarsimp simp: vspace_at_asid_def simp del: fun_upd_apply)
-  apply (drule bspec, blast)
-  apply clarsimp
-  apply (rule vs_lookup_stateI, assumption)
-  apply (clarsimp simp: obj_at_def)
-  apply blast
-  apply simp
-  done
-
 lemma set_object_equal_mappings:
   "\\s. equal_kernel_mappings s
            \ (\pml4. ko = ArchObj (PageMapL4 pml4)
diff --git 
a/proof/invariant-abstract/X64/ArchVSpace_AI.thy b/proof/invariant-abstract/X64/ArchVSpace_AI.thy index ebc6f02fc6..5e67806b0d 100644 --- a/proof/invariant-abstract/X64/ArchVSpace_AI.thy +++ b/proof/invariant-abstract/X64/ArchVSpace_AI.thy @@ -67,9 +67,6 @@ definition crunch pspace_in_kernel_window[wp]: unmap_page, perform_page_invocation "pspace_in_kernel_window" (simp: crunch_simps wp: crunch_wps) -definition - "vspace_at_uniq asid pd \ \s. pd \ ran (x64_asid_map (arch_state s) |` (- {asid}))" - crunch inv[wp]: find_vspace_for_asid_assert "P" (simp: crunch_simps) @@ -152,23 +149,6 @@ lemma vspace_at_asid_unique2: done -lemma vspace_at_asid_uniq: - "\ vspace_at_asid asid pml4 s; asid \ mask asid_bits; valid_asid_map s; - unique_table_refs (caps_of_state s); valid_vs_lookup s; - valid_vspace_objs s; valid_global_objs s; valid_arch_state s \ - \ vspace_at_uniq asid pml4 s" - apply (clarsimp simp: vspace_at_uniq_def ran_option_map - dest!: ran_restrictD) - apply (clarsimp simp: valid_asid_map_def) - apply (drule bspec, erule graph_ofI) - apply clarsimp - apply (rule vspace_at_asid_unique, assumption+) - apply (drule subsetD, erule domI) - apply (simp add: mask_def) - apply (simp add: mask_def) - done - - lemma valid_vs_lookupE: "\ valid_vs_lookup s; \ref p. (ref \ p) s' \ (ref \ p) s; set (x64_global_pdpts (arch_state s)) \ set (x64_global_pdpts (arch_state s')); @@ -207,30 +187,6 @@ lemma find_vspace_for_asid_vspace_at_asid [wp]: crunch valid_vs_lookup[wp]: do_machine_op "valid_vs_lookup" -lemma valid_asid_mapD: - "\ x64_asid_map (arch_state s) asid = Some pml4; valid_asid_map s \ - \ vspace_at_asid asid pml4 s \ asid \ mask asid_bits" - by (auto simp add: valid_asid_map_def graph_of_def) - - -lemma pml4_cap_vspace_at_uniq: - "\ cte_wp_at (op = (ArchObjectCap (PML4Cap pml4 (Some asid)))) slot s; - valid_asid_map s; valid_vs_lookup s; unique_table_refs (caps_of_state s); - valid_arch_state s; valid_global_objs s; valid_objs s \ - \ vspace_at_uniq asid pml4 s" - apply (frule(1) cte_wp_at_valid_objs_valid_cap) - apply (clarsimp simp: vspace_at_uniq_def restrict_map_def valid_cap_def - elim!: ranE split: if_split_asm) - apply (drule(1) valid_asid_mapD) - apply (clarsimp simp: vspace_at_asid_def) - apply (frule(1) valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI]) - apply (clarsimp simp: cte_wp_at_caps_of_state dest!: obj_ref_elemD) - apply (drule(1) unique_table_refsD[rotated, where cps="caps_of_state s"], - simp+) - apply (clarsimp simp: table_cap_ref_ap_eq[symmetric] table_cap_ref_def - split: cap.splits arch_cap.splits option.splits) - apply (drule(1) asid_low_high_bits, simp_all add: mask_def) - done lemma invalidateTLB_underlying_memory: "\\m'. underlying_memory m' p = um\ @@ -245,12 +201,6 @@ lemma vspace_at_asid_arch_up': by (clarsimp simp add: vspace_at_asid_def vs_lookup_def vs_lookup1_def) -lemma vspace_at_asid_arch_up: - "vspace_at_asid asid pml4 (s\arch_state := arch_state s \x64_asid_map := a\\) = - vspace_at_asid asid pml4 s" - by (simp add: vspace_at_asid_arch_up') - - lemmas ackInterrupt_irq_masks = no_irq[OF no_irq_ackInterrupt] @@ -374,59 +324,6 @@ lemma asid_high_bits_shl: done -lemma valid_asid_map_unmap: - "valid_asid_map s \ is_aligned base asid_low_bits \ base \ mask asid_bits \ - (\x \ set [0.e.2^asid_low_bits - 1]. 
x64_asid_map (arch_state s) (base + x) = None) \ - valid_asid_map(s\arch_state := arch_state s\x64_asid_table := (x64_asid_table (arch_state s))(asid_high_bits_of base := None)\\)" - apply (clarsimp simp: valid_asid_map_def vspace_at_asid_def) - apply (drule bspec, blast) - apply clarsimp - apply (erule vs_lookupE) - apply (clarsimp simp: vs_asid_refs_def dest!: graph_ofD) - apply (frule vs_lookup1_trans_is_append, clarsimp) - apply (drule ucast_up_inj, simp) - apply clarsimp - apply (rule_tac ref'="([VSRef (ucast (asid_high_bits_of a)) None],ba)" in vs_lookupI) - apply (simp add: vs_asid_refs_def) - apply (simp add: graph_of_def) - apply (rule_tac x="(asid_high_bits_of a, ba)" in image_eqI) - apply simp - apply clarsimp - apply (subgoal_tac "a \ mask asid_bits") - prefer 2 - apply fastforce - apply (drule_tac asid=a in ex_asid_high_bits_plus) - apply (clarsimp simp: asid_high_bits_shl) - apply (drule rtranclD, simp) - apply (drule tranclD) - apply clarsimp - apply (drule vs_lookup1D) - apply clarsimp - apply (frule vs_lookup1_trans_is_append, clarsimp) - apply (drule vs_lookup_trans_ptr_eq, clarsimp) - apply (rule r_into_rtrancl) - apply (rule vs_lookup1I) - apply simp - apply assumption - apply simp - done - -lemma invalidate_asid_asid_map_None_inv: - "\\s. x64_asid_map (arch_state s) y = None\ - invalidate_asid x - \\_ s. x64_asid_map (arch_state s) y = None\" - apply (simp add: invalidate_asid_def) - apply (wp ) - apply simp - done - -lemma invalidate_asid_entry_asid_map_None_inv: - "\\s. x64_asid_map (arch_state s) y = None\ - invalidate_asid_entry x z - \\_ s. x64_asid_map (arch_state s) y = None\" - apply (simp add: invalidate_asid_entry_def) - by (wp invalidate_asid_asid_map_None_inv) - crunch vs_lookup [wp]: invalidate_asid_entry "\s. P (vs_lookup s)" crunch aligned [wp]: invalidate_asid_entry pspace_aligned @@ -438,30 +335,6 @@ crunch caps_of_state[wp]: invalidate_asid_entry "\s. P (caps_of_state s) crunch vspace_objs [wp]: invalidate_asid_entry valid_vspace_objs (simp: valid_vspace_objs_arch_update) -lemma invalidate_asid_invalidates: - "\valid_asid_map and valid_arch_state and K (asid \ mask asid_bits) and - (\s. x64_asid_table (arch_state s) (asid_high_bits_of asid) = Some ap)\ - invalidate_asid asid - \\rv s. \asida. asida \ mask asid_bits \ - ucast asida = (ucast asid :: 9 word) \ - x64_asid_table (arch_state s) (asid_high_bits_of asida) = - Some ap \ - x64_asid_map (arch_state s) asida = None\" - apply (simp add: invalidate_asid_def) - apply wp - apply clarsimp - apply (clarsimp simp: valid_arch_state_def valid_asid_table_def) - apply (drule_tac x="asid_high_bits_of asid" and y="asid_high_bits_of asida" in inj_onD) - apply simp - apply blast - apply blast - apply clarsimp - apply (drule asid_low_high_bits', simp) - apply (fastforce simp: mask_def) - apply (fastforce simp: mask_def) - apply (erule (1) notE) - done - crunch typ_at [wp]: invalidate_asid_entry "\s. P (typ_at T p s)" lemmas invalidate_asid_entry_typ_ats [wp] = @@ -471,41 +344,8 @@ crunch cur [wp]: invalidate_asid_entry cur_tcb crunch valid_objs [wp]: invalidate_asid_entry valid_objs -lemma invalidate_asid_asid_map [wp]: - "\valid_asid_map\ invalidate_asid asid \\_. valid_asid_map\" - apply (simp add: invalidate_asid_def ) - apply (wp ) - apply (clarsimp simp: valid_asid_map_def simp del: fun_upd_apply) - apply (clarsimp simp: vspace_at_asid_def vs_lookup_arch_update) - apply auto - done - -crunch asid_map[wp]: invalidate_asid_entry "valid_asid_map" - crunch obj_at [wp]: invalidate_asid_entry "\s. 
P (obj_at Q p s)" -lemma invalidate_asid_invs [wp]: - "\invs\ invalidate_asid asid \\_. invs\" - apply (simp add: invalidate_asid_def) - apply (rule hoare_pre, wp ) - apply (clarsimp simp: invs_def valid_state_def valid_irq_node_def - valid_arch_caps_def valid_table_caps_def - valid_kernel_mappings_def valid_global_objs_def - valid_global_refs_def global_refs_def - valid_vs_lookup_def - vs_lookup_arch_update vs_lookup_pages_arch_update - valid_vspace_objs_def valid_arch_state_def - second_level_tables_def - simp del: fun_upd_apply) - apply (clarsimp simp: valid_asid_map_def valid_machine_state_def) - apply (rule conjI) - apply (erule order_trans[rotated], clarsimp) - apply (simp add: vspace_at_asid_arch_up') - done - -crunch valid_vs_lookup[wp]: invalidate_asid "valid_vs_lookup" - (simp: valid_vs_lookup_def) - crunch valid_vs_lookup[wp]: invalidate_asid_entry "valid_vs_lookup" crunch arm_asid_table_inv[wp]: invalidate_asid_entry @@ -533,51 +373,6 @@ lemma dmo_hwASIDInvalidate_invs[wp]: lemma invalidate_asid_entry_invs[wp]: "\invs\ invalidate_asid_entry asid vspace \\_. invs\" by (wpsimp simp: invalidate_asid_entry_def) -lemma mapM_invalidate: - "\[VSRef (ucast (asid_high_bits_of base)) None] \ ptr and - ko_at (ArchObj (arch_kernel_obj.ASIDPool pool)) ptr and - valid_asid_map and K (is_aligned base asid_low_bits)\ - mapM (\offset. when (\y. pool (ucast offset) = Some y) $ - invalidate_asid_entry (base + offset) (the (pool (ucast offset)))) - [0.e.2 ^ asid_low_bits - 1] - \\rv s. \x\2 ^ asid_low_bits - 1. x64_asid_map (arch_state s) (base + x) = None\" -proof - - have ball: "\P w::machine_word. (\x\w. P x) = (\x \ set [0.e.w]. P x)" by simp - show ?thesis - apply (subst ball) - apply (rule mapM_set) - apply (wp, simp) - apply (wp | simp add: invalidate_asid_entry_def invalidate_asid_def - cong: if_cong)+ - apply clarsimp - apply (rule ccontr) - apply clarsimp - apply (clarsimp simp: valid_asid_map_def) - apply (drule bspec, erule graph_ofI) - apply (erule vs_lookup_atE) - apply (clarsimp simp: vspace_at_asid_def) - apply (drule vs_lookup_2ConsD) - apply clarsimp - apply (erule vs_lookup_atE) - apply (drule vs_lookup1D) - apply clarsimp - apply (subgoal_tac "p' = ptr") - apply (clarsimp simp: obj_at_def vs_refs_def graph_of_def) - apply (subgoal_tac "base + x && mask asid_low_bits = x") - apply (simp add: ucast_ucast_mask) - apply (subgoal_tac "a && mask word_bits = a") - apply (simp add: word_bits_def) - apply (rule word_eqI) - apply (simp add: word_size word_bits_def) - apply (subst add_mask_eq, assumption+) - apply (simp add: asid_low_bits_def word_bits_def) - apply (rule refl) - apply (subst (asm) asid_high_bits_of_add, assumption+) - apply simp - apply (wp invalidate_asid_entry_asid_map_None_inv) - apply simp - done -qed lemma asid_low_bits_word_bits: "asid_low_bits < word_bits" @@ -625,12 +420,6 @@ lemma valid_vs_lookup_arch_update: \ valid_vs_lookup (arch_state_update f s) = valid_vs_lookup s" by (simp add: valid_vs_lookup_def vs_lookup_pages_arch_update) -lemma valid_asid_map_arch_update: - assumes "x64_asid_table (f (arch_state s)) = x64_asid_table (arch_state s)" - assumes "x64_asid_map (f (arch_state s)) = x64_asid_map (arch_state s)" - shows "valid_asid_map (arch_state_update f s) = valid_asid_map s" - by (simp add: assms valid_asid_map_def vspace_at_asid_def vs_lookup_arch_update) - crunch typ_at [wp]: find_vspace_for_asid "\s. 
P (typ_at T p s)" lemmas find_vspace_for_asid_typ_ats [wp] = abs_typ_at_lifts [OF find_vspace_for_asid_typ_at] @@ -1111,7 +900,6 @@ lemma get_current_cr3_rewrite_lift[wp]: lemma arch_state_update_invs: assumes "invs s" assumes "x64_asid_table (f (arch_state s)) = x64_asid_table (arch_state s)" - assumes "x64_asid_map (f (arch_state s)) = x64_asid_map (arch_state s)" assumes "valid_global_refs s \ valid_global_refs (arch_state_update f s)" assumes "valid_arch_state s \ valid_arch_state (arch_state_update f s)" assumes "valid_table_caps s \ valid_table_caps (arch_state_update f s)" @@ -1122,9 +910,8 @@ lemma arch_state_update_invs: assumes "cap_refs_in_kernel_window s \ cap_refs_in_kernel_window (arch_state_update f s)" shows "invs (arch_state_update f s)" using assms by (simp add: invs_def valid_state_def valid_irq_node_def valid_irq_states_def - valid_machine_state_def valid_arch_caps_def - valid_vspace_objs_arch_update valid_vs_lookup_arch_update - valid_asid_map_arch_update) + valid_machine_state_def valid_arch_caps_def valid_asid_map_def + valid_vspace_objs_arch_update valid_vs_lookup_arch_update) lemma set_current_cr3_invs[wp]: "\invs\ set_current_cr3 c \\rv. invs\" @@ -1138,55 +925,15 @@ lemma set_current_vspace_root_invs[wp]: apply (wp) done -lemma update_asid_map_valid_arch: - notes hoare_pre [wp_pre del] - shows "\valid_arch_state\ - update_asid_map asid - \\_. valid_arch_state\" - apply (simp add: update_asid_map_def) - apply (wp find_vspace_for_asid_assert_wp) - apply (simp add: valid_arch_state_def fun_upd_def[symmetric] comp_upd_simp) - done - -lemma update_asid_map_invs: - "\invs and K (asid \ mask asid_bits)\ update_asid_map asid \\rv. invs\" - apply (rule hoare_add_post) - apply (rule update_asid_map_valid_arch) - apply fastforce - apply (simp add: update_asid_map_def) - apply (wp find_vspace_for_asid_assert_wp) - apply (clarsimp simp: invs_def valid_state_def) - apply (simp add: valid_global_refs_def global_refs_def - valid_irq_node_def valid_vspace_objs_arch_update - valid_global_objs_def valid_arch_caps_def - valid_table_caps_def valid_kernel_mappings_def - valid_machine_state_def valid_vs_lookup_arch_update - second_level_tables_def) - apply (simp add: valid_asid_map_def fun_upd_def[symmetric] vspace_at_asid_arch_up) - done - -lemma update_asid_map_pred_tcb_at[wp]: - "\pred_tcb_at proj P t\ update_asid_map a \\_. pred_tcb_at proj P t\" - by (clarsimp simp: pred_tcb_at_def update_asid_map_def | wp)+ - lemma svr_invs [wp]: "\invs\ set_vm_root t' \\_. invs\" apply (simp add: set_vm_root_def) - apply (rule hoare_pre) - apply (wp hoare_whenE_wp find_vspace_for_asid_inv hoare_vcg_all_lift update_asid_map_invs - | wpc - | simp add: split_def if_apply_def2 cong: if_cong)+ - apply (rule_tac Q'="\_ s. invs s \ x2 \ mask asid_bits" in hoare_post_imp_R) - prefer 2 - apply simp - apply (rule valid_validE_R) - apply (wp find_vspace_for_asid_inv | simp add: split_def)+ - apply (rule_tac Q="\c s. invs s \ s \ c" in hoare_strengthen_post) - apply wp - apply (clarsimp simp: valid_cap_def mask_def) - by (fastforce) + apply (wp hoare_whenE_wp get_cap_wp + | wpc + | simp add: split_def if_apply_def2 cong: if_cong)+ + done -crunch pred_tcb_at[wp]: set_current_vspace_root, update_asid_map "pred_tcb_at proj P t" +crunch pred_tcb_at[wp]: set_current_vspace_root "pred_tcb_at proj P t" lemma svr_pred_st_tcb[wp]: "\pred_tcb_at proj P t\ set_vm_root t \\_. 
pred_tcb_at proj P t\" @@ -3975,7 +3722,7 @@ lemma asid_is_zeroI: lemma store_asid_pool_entry_invs: "\invs and K (asid \ mask asid_bits \ 0 < asid) and - (\s. case pml4base of None \ x64_asid_map (arch_state s) asid = None + (\s. case pml4base of None \ True | Some ptr \ obj_at (empty_table (set (second_level_tables (arch_state s)))) ptr s \ typ_at (AArch APageMapL4) ptr s \ (\pool. ko_at (ArchObj (ASIDPool pool)) p s \ pool (ucast asid) = None) \ p \ set (second_level_tables (arch_state s)) @@ -3983,48 +3730,32 @@ lemma store_asid_pool_entry_invs: and [VSRef (ucast (asid_high_bits_of asid)) None] \ p \ store_asid_pool_entry p asid pml4base \\_. invs\" - apply (simp add: store_asid_pool_entry_def) - apply (wp) + unfolding store_asid_pool_entry_def + apply simp + apply wp + apply simp apply (intro impI allI conjI valid_table_caps_aobj_upd_invalid_pml4e invs_valid_table_caps , simp_all add: obj_at_def) - apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask a_type_simps - split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs - | intro valid_table_caps_asid_upd)+ - apply (drule subsetD[OF vs_refs_of_set_diff]) - apply (frule invs_valid_asid_map) - apply (clarsimp dest!: asid_lvl_lookup1D - split: option.splits simp: ucast_ucast_id graph_of_def ucast_ucast_mask) - apply (clarsimp simp: asid_low_bits_def[symmetric]) - apply (drule asid_low_high_bits) - apply (drule(2) valid_asid_table_injD[OF _ _ invs_valid_asid_table]) - apply ((clarsimp dest!: valid_asid_mapD simp: mask_def)+)[4] - apply (drule subsetD[OF vs_refs_of_set_diff]) - apply (frule invs_valid_asid_map) - apply (clarsimp dest!: asid_lvl_lookup1D valid_asid_mapD - split: option.splits simp: ucast_ucast_id graph_of_def ucast_ucast_mask) - apply (clarsimp simp: asid_low_bits_def[symmetric] vspace_at_asid_def vs_lookup_def) - apply (erule wellformed_lookup.lookup_forwardE[OF vs_lookup1_is_wellformed_lookup] - | erule wellformed_lookup.lookupE[OF vs_lookup1_is_wellformed_lookup] - | clarsimp simp: vs_asid_refs_def up_ucast_inj_eq obj_at_def - dest!: vs_lookup1D graph_ofD)+ - apply (clarsimp simp: lookup_leaf_def vs_refs_def mask_asid_low_bits_ucast_ucast dest!: graph_ofD) + apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask a_type_simps + split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs + | intro valid_table_caps_asid_upd)+ apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs | intro valid_kernel_mappings_if_pm_asid invs_valid_kernel_mappings valid_global_objs_upd_invalid_asid_slot | fastforce)+ - apply (fastforce dest!: valid_vspace_obj_from_invs valid_vspace_obj_from_invs ran_del_subset - simp: obj_at_def)+ + apply (fastforce dest!: valid_vspace_obj_from_invs valid_vspace_obj_from_invs ran_del_subset + simp: obj_at_def)+ apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs) apply (rule ccontr) apply (erule(1) unmapped_cap_lookup_refs_empty[OF _ rtrancl_into_trancl1]) - apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup1_is_wellformed_lookup],fastforce,fastforce) + apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup1_is_wellformed_lookup],fastforce,fastforce) apply (clarsimp simp: aa_type_simps obj_at_def split: 
option.split_asm if_split_asm) apply (clarsimp simp: obj_at_def aa_type_simps aobj_upd_invalid_slots_simps ucast_ucast_mask - split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs) + split: if_split_asm option.split_asm dest:valid_vspace_obj_from_invs valid_obj_from_invs) apply (rule ccontr) apply (erule unmapped_cap_lookup_refs_pages_empty[OF _ ]) - apply (erule rtrancl_into_trancl1) - apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup_pages1_is_wellformed_lookup],fastforce,fastforce) + apply (erule rtrancl_into_trancl1) + apply (drule wellformed_lookup.lookup1_cut[OF vs_lookup_pages1_is_wellformed_lookup],fastforce,fastforce) apply (clarsimp split: option.split_asm if_split_asm simp: aa_type_simps obj_at_def) apply (clarsimp split: if_split_asm option.split_asm dest!:ref_pages_Some simp: lookupable_refs_def obj_at_def) apply (frule(2) empty_table_lookup_refs_refl) @@ -4037,9 +3768,9 @@ lemma store_asid_pool_entry_invs: apply force apply force apply (drule(1) ref_is_unique) - apply force+ - apply (clarsimp dest!: invs_valid_objs valid_objs_caps) - apply clarsimp + apply force+ + apply (clarsimp dest!: invs_valid_objs valid_objs_caps) + apply clarsimp apply (drule asid_is_zeroI, simp+) apply (clarsimp simp: lookupable_refs_def obj_at_def split: option.split_asm if_split_asm) apply (frule(2) empty_table_lookup_refs_pages_refl) @@ -4049,7 +3780,7 @@ lemma store_asid_pool_entry_invs: apply force apply force apply (drule(1) ref_is_unique) - apply force+ + apply force+ apply (clarsimp dest!: invs_valid_objs valid_objs_caps) apply (clarsimp simp: cte_wp_at_caps_of_state is_cap_simps) apply (intro exI conjI)