| messages (list, lengths 2 to 2) |
|---|
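Each row of the `messages` column is a two-element chat list of `role`/`content` pairs (per the length stats above); in this excerpt only the user message is visible, and it is truncated. Below is a minimal, hypothetical Python sketch of how such a row is shaped and how the F* context could be split out of the user prompt. The record literal is abridged with `...`, and the assistant entry is a placeholder assumption, not data from this preview.

```python
import json

# Hypothetical, abridged row mirroring the layout of the preview below.
row = {
    "messages": [
        {
            "role": "user",
            "content": "Complete this F* function with a correct proof.\n\n"
                       "Type: alg: Vale.AES.AES_common_s.algorithm -> "
                       "offset: Prims.int -> Vale.X64.Decls.va_code\n"
                       "Effect: Prims.Tot\n\n"
                       "Context:\nmodule Vale.AES.X64.GCMdecryptOpt\n...",
        },
        {
            "role": "assistant",
            "content": "...",  # expected F* completion (placeholder, not shown in this preview)
        },
    ]
}

# Split the user prompt into its task statement and the embedded F* module context.
user_msg = row["messages"][0]["content"]
task, _, context = user_msg.partition("\nContext:\n")
print(task.splitlines()[0])     # "Complete this F* function with a correct proof."
print(context.splitlines()[0])  # "module Vale.AES.X64.GCMdecryptOpt"

# The row round-trips through JSON unchanged, matching the serialized form below.
assert json.loads(json.dumps(row)) == row
```

The raw (truncated) row as serialized in the dataset follows.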
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: alg: Vale.AES.AES_common_s.algorithm -> offset: Prims.int -> Vale.X64.Decls.va_code\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file 
/home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq 
quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 
rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b 
inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg 
inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n (va_QProc (va_code_Gcm_extra_bytes alg) ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE) (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE))\n//--\n//-- Gcm_blocks128\n\nval va_code_Gcm_blocks128 : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks128 alg =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_CCons\n (va_code_Ghash_buffer ()) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRbx)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR12)) (va_CCons (va_code_Gctr_blocks128 alg) (va_CNil ())))))))))\n\nval va_codegen_success_Gcm_blocks128 : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks128 alg =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Ghash_buffer ()) 
(va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_pbool_and\n (va_codegen_success_Gctr_blocks128 alg) (va_ttrue ()))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks128 (va_mods:va_mods_t) (alg:algorithm) (in_b:buffer128) (out_b:buffer128)\n (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 274 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 275 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 276 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 277 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_buffer hkeys_b in_b h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_old_s))) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 278 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 279 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 280 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_QEmpty (()))))))))))\n\n\nval va_lemma_Gcm_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->\n out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks128 alg) va_s0 /\\ va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < 
pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) /\\ va_state_eq va_sM (va_update_flags va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 10 va_sM (va_update_xmm 11 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR11 va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks128 va_b0 va_s0 alg in_b out_b key round_keys keys_b hkeys_b h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks128 va_mods alg in_b out_b key round_keys keys_b hkeys_b h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks128 alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 210 
column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (label va_range1\n \"***** POSTCONDITION NOT MET AT line 255 column 53 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 261 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 262 column 45 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx\n va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 265 column 93 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) out_b)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 267 column 131 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 > 0 ==> l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n FStar.Seq.Base.length #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) (va_s0:va_state)\n (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax 
va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret) /\\ (forall (va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_rdi:nat64) (va_x_r11:nat64)\n (va_x_r10:nat64) (va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32)\n (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl\n (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11\n (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5\n (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1\n (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64\n rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbx\n va_x_rbx (va_upd_mem va_x_mem va_s0))))))))))))))))))) in va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq\n nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32 ->\n va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires 
(va_t_require va_s0 /\\ va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b\n h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks128 alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks128 (va_code_Gcm_blocks128 alg) va_s0 alg in_b out_b key\n round_keys keys_b hkeys_b h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6\n va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2\n va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR12 va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM (va_update_ok va_sM (va_update_mem\n va_sM va_s0))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) : (va_quickCode\n unit (va_code_Gcm_blocks128 alg)) =\n (va_QProc (va_code_Gcm_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;\n va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64\n rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem])\n (va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE)\n (va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE))\n//--\n//-- Gcm_blocks\n#push-options \"--z3rlimit 1000\"\n\nval va_code_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks alg offset =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64\n 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 0)) (va_CCons (va_code_Gcm_blocks_auth ()) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 24)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13))\n 
(va_CCons (va_code_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public) (va_CCons (va_code_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_CCons (va_code_Load_one_lsb\n (va_op_xmm_xmm 10)) (va_CCons (va_code_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1)\n (va_op_xmm_xmm 10)) (va_CCons (va_code_AES_GCM_decrypt_6mult alg) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64\n rRbp) 32 Secret) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdi)\n (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_CCons\n (va_code_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_CCons (va_code_Gcm_blocks128 alg)\n (va_CCons (va_code_Stack_lemma ()) (va_CCons (va_code_Add64 (va_op_dst_opr64_reg64 rR14)\n (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public)) (va_CCons (va_code_IMul64\n (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_CCons (va_IfElse\n (va_cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (va_Block (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (va_CCons\n (va_code_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_CCons\n (va_code_Gcm_extra_bytes alg) (va_CCons (va_Block (va_CNil ())) (va_CNil ()))))))) (va_Block\n (va_CNil ()))) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15))\n (va_CCons (va_code_Gcm_make_length_quad ()) (va_CCons (va_code_Ghash_register ()) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret) (va_CCons (va_code_Gctr_register alg) (va_CCons (va_Block (va_CNil ()))\n (va_CNil ()))))))))))))))))))))))))))))))))))))\n\nval va_codegen_success_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks alg offset =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9)\n (va_const_opr64 32)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64\n rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0)) (va_pbool_and\n (va_codegen_success_Gcm_blocks_auth ()) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp)\n (offset + 16)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx)\n (va_op_reg_opr64_reg64 rRsp) (offset + 24)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) 
(va_pbool_and (va_codegen_success_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64 rR8) 0 Public)\n (va_pbool_and (va_codegen_success_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_pbool_and\n (va_codegen_success_Load_one_lsb (va_op_xmm_xmm 10)) (va_pbool_and (va_codegen_success_VPaddd\n (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_pbool_and\n (va_codegen_success_AES_GCM_decrypt_6mult alg) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64 rRbp) 32 Secret)\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax)\n (va_op_reg_opr64_reg64 rRsp) (offset + 32)) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 48)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR14)\n (va_op_opr64_reg64 rRdx)) (va_pbool_and (va_codegen_success_InitPshufbMask (va_op_xmm_xmm 9)\n (va_op_reg_opr64_reg64 rR12)) (va_pbool_and (va_codegen_success_Pshufb (va_op_xmm_xmm 11)\n (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Gcm_blocks128 alg) (va_pbool_and\n (va_codegen_success_Stack_lemma ()) (va_pbool_and (va_codegen_success_Add64\n (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public))\n (va_pbool_and (va_codegen_success_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rR13)\n (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_pbool_and (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 56)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR10)\n (va_op_opr64_reg64 rR13)) (va_pbool_and (va_codegen_success_And64 (va_op_dst_opr64_reg64 rR10)\n (va_const_opr64 15)) (va_codegen_success_Gcm_extra_bytes alg)))) (va_pbool_and\n (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_pbool_and\n (va_codegen_success_Gcm_make_length_quad ()) (va_pbool_and (va_codegen_success_Ghash_register\n ()) (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64 rRbp) 0 Secret) (va_pbool_and\n (va_codegen_success_Gctr_register alg) (va_ttrue ()))))))))))))))))))))))))))))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks (va_mods:va_mods_t) (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit\n (va_code_Gcm_blocks alg offset)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 0) (va_get_stack va_s) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 8) (va_get_stack va_s) in let\n 
(out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 16) (va_get_stack va_s) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 24) (va_get_stack va_s) in let\n (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 32) (va_get_stack va_s) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 40) (va_get_stack va_s) in let\n (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset\n + 48) (va_get_stack va_s) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 56) (va_get_stack va_s) in let\n (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s\n + offset + 64) (va_get_stack va_s) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_old_s)) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 463 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 464 column 13 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 465 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 466 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE) (fun (va_s:va_state)\n (auth_quad_seq:(seq quad32)) -> let (y_0:quad32) = Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0 in let (y_auth_bytes:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 473 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 474 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 475 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 24))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 476 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) (va_QBind 
va_range1\n \"***** PRECONDITION NOT MET AT line 477 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> let\n (iv_BE:Vale.X64.Decls.quad32) = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2\n va_old_s) in let (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32\n iv_BE 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 483 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public iv_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 485 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_reg_opr64_reg64 rRbp)\n (va_op_xmm_xmm 1) 0 Secret scratch_b 0) (fun (va_s:va_state) _ -> let (j0:quad32) = va_get_xmm\n 1 va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 487 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load_one_lsb (va_op_xmm_xmm 10)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 489 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 491 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AES_GCM_decrypt_6mult alg h_LE iv_b in128x6_b out128x6_b scratch_b key round_keys\n keys_b hkeys_b) (fun (va_s:va_state) _ -> let (y_cipher128x6:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (auth_in:(seq quad32)) =\n auth_quad_seq in let (va_arg138:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in let\n (va_arg137:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg136:Vale.Def.Types_s.quad32) = y_auth_bytes in let (va_arg135:Vale.Def.Types_s.quad32) =\n y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 494 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg135 va_arg136\n y_cipher128x6 va_arg137 va_arg138) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 498 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11)\n (va_op_reg_opr64_reg64 rRbp) 32 Secret scratch_b 2) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 499 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 500 
column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 501 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 502 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 503 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 504 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 505 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 506 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks128 alg in128_b out128_b key round_keys keys_b hkeys_b h_LE) (fun\n (va_s:va_state) _ -> let (y_cipher128:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (va_arg134:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b in let\n (va_arg133:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg132:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 508 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg132 y_cipher128x6\n y_cipher128 va_arg133 va_arg134) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Stack_lemma (va_op_reg64_reg64 rRsp) (offset + 24) Public) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Add64 (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp)\n (offset + 24) Public)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 513 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_QBind va_range1\n \"***** PRECONDITION 
NOT MET AT line 514 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64))\n (fun (va_s:va_state) _ -> let (y_inout:Vale.Def.Types_s.quad32) = y_cipher128 in let\n (plain_byte_seq:(seq quad32)) = empty_seq_quad32 in let (cipher_byte_seq:(seq quad32)) =\n empty_seq_quad32 in let (va_arg131:Vale.Def.Types_s.quad32) = va_get_xmm 11 va_s in let\n (va_arg130:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg129:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg128:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg127:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 519 column 29 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_opaque_init va_arg127 va_arg128 va_arg129 va_arg130\n va_arg131) (let (total_bytes:(va_int_at_least 0)) = FStar.Seq.Base.length #quad32 auth_quad_seq\n `op_Multiply` 16 + plain_num_bytes in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 523 column 8 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_qIf va_mods (Cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (qblock va_mods (fun\n (va_s:va_state) -> va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 525 column 21 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 526 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (fun (va_s:va_state) _\n -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 527 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Poly1305.Math.lemma_poly_bits64 ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 528 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 532 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes y_0 auth_in\n h_LE) (fun (va_s:va_state) _ -> let y_inout = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm\n 8 va_s) in let (raw_auth_quads:(FStar.Seq.Base.seq quad32)) = FStar.Seq.Base.append #quad32\n auth_in (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b) in va_qAssertSquash\n va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 536 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun a_1906 (s_1907:(FStar.Seq.Base.seq a_1906)) (i_1908:Prims.nat) (j_1909:Prims.nat) -> let\n (j_1869:Prims.nat) = j_1909 in Prims.b2t (Prims.op_AmpAmp 
(Prims.op_LessThanOrEqual i_1908\n j_1869) (Prims.op_LessThanOrEqual j_1869 (FStar.Seq.Base.length #a_1906 s_1907))))\n Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 total_bytes)\n (fun _ -> let (auth_input_bytes:(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)) =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n raw_auth_quads) 0 total_bytes in let (padded_auth_bytes:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let auth_in =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let plain_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let cipher_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QEmpty ((auth_in,\n cipher_byte_seq, plain_byte_seq, y_inout)))))))))) (qblock va_mods (fun (va_s:va_state) ->\n va_QEmpty ((auth_in, cipher_byte_seq, plain_byte_seq, y_inout))))) (fun (va_s:va_state) va_g ->\n let ((auth_in:(seq quad32)), (cipher_byte_seq:(seq quad32)), (plain_byte_seq:(seq quad32)),\n (y_inout:Vale.Def.Types_s.quad32)) = va_g in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 547 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 548 column 25 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_make_length_quad ()) (fun (va_s:va_state) _ -> let\n (length_quad32:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 0\n va_s) in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 551 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_register hkeys_b h_LE y_inout) (fun (va_s:va_state) _ -> let\n (y_final:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s)\n in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 554 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret scratch_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 557 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_register alg key round_keys keys_b) (fun (va_s:va_state) _ -> let\n (va_arg126:Vale.Def.Types_s.quad32) = va_get_xmm 8 va_s in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 560 column 40 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Arch.Types.le_seq_quad32_to_bytes_of_singleton va_arg126)\n (va_qAssertSquash va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun (icb_BE_677:Vale.Def.Types_s.quad32) (plain_LE_678:Vale.Def.Types_s.quad32)\n (alg_679:Vale.AES.AES_common_s.algorithm) (key_680:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32))\n (i_681:Prims.int) -> Vale.AES.AES_s.is_aes_key_LE alg_679 key_680) j0 y_final alg key 0) (fun 
_\n -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_s == Vale.AES.GCTR_s.gctr_encrypt_block j0 y_final alg key 0) (let\n (plain128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let\n (cipher128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) in128_b) in va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 566 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 plain_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 plain128 plain_byte_seq)\n plain128) (va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 567 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 cipher_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 cipher128 cipher_byte_seq)\n cipher128) (let (va_arg125:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite ctr_BE_2 len128x6\n in let (va_arg124:Vale.Def.Types_s.quad32) = ctr_BE_2 in let (va_arg123:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat32)) = key in let (va_arg122:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32))\n = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg121:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg120:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b in let\n (va_arg119:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_old_s) in128x6_b in let (va_arg118:Prims.nat) = len128 in let\n (va_arg117:Prims.nat) = len128x6 in let (va_arg116:Vale.AES.AES_common_s.algorithm) = alg in\n va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 569 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg116 va_arg117 va_arg118\n va_arg119 va_arg120 va_arg121 va_arg122 va_arg123 va_arg124 va_arg125) (let\n (va_arg115:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite (Vale.AES.GCTR.inc32lite ctr_BE_2\n len128x6) len128 in let (va_arg114:Vale.Def.Types_s.quad32) = ctr_BE_2 in let\n (va_arg113:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg112:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg111:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg110:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b) in let\n (va_arg109:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) 
in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let (va_arg108:Prims.nat) =\n FStar.Seq.Base.length #quad32 plain_byte_seq in let (va_arg107:Prims.nat) = len128x6 + len128\n in let (va_arg106:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 575 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg106 va_arg107 va_arg108\n va_arg109 va_arg110 va_arg111 va_arg112 va_arg113 va_arg114 va_arg115) (let\n (va_arg105:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg104:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 583 column 23 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_hash_append2 h_LE va_arg104 y_inout y_final va_arg105\n length_quad32) (let auth_in = FStar.Seq.Base.append #quad32 auth_in (FStar.Seq.Base.create\n #Vale.Def.Types_s.quad32 1 length_quad32) in let (va_arg103:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = auth_in in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 585 column 31 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.ghash_incremental_to_ghash h_LE va_arg103) (va_QEmpty\n (()))))))))))))))))))))))))))))))))))))))))))))))))\n\n\nval va_lemma_Gcm_blocks : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> offset:int ->\n auth_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->\n in128_b:buffer128 -> out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 ->\n scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks alg offset) va_s0 /\\ va_get_ok va_s0 /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\ movbe_enabled /\\\n 
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n 
/\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))))))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) 
(va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) else FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = (if (plain_num_bytes\n > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) else\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM)\n out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) in let\n (cipher_bound:nat) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division`\n 8) then (len128x6 + len128 + 1) else (len128x6 + len128)) in Vale.AES.GCTR.gctr_partial alg\n cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let (length_quad:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let\n (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let\n (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let\n (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat) =\n FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then (let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) else\n raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) /\\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM\n va_s0)))))))))))))))))))))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks va_b0 va_s0 alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;\n va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14;\n va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;\n va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;\n va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx;\n va_Mod_reg64 rRax; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks va_mods alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks alg offset) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 283 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 
(va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 396 column 56 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 397 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 398 column 57 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 399 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 400 column 58 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet\n 6 va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 403 column 39 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (plain_num_bytes < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 404 column 38 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRsi va_s0 < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 406 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 408 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (ctr_BE_1:quad32) = iv_BE in label va_range1\n \"***** POSTCONDITION NOT MET AT line 409 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let 
(ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 412 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 421 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_out:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 430 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 + len128) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 434 column 77 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 438 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply`\n plain_num_bytes) 0) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 440 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 444 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 445 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 446 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes\n in label va_range1\n \"***** POSTCONDITION NOT MET AT line 448 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 452 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (total_bytes:nat) = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 +\n plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 453 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply`\n 128 `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32\n raw_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let\n (pb:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32\n pb) (fun _ -> raw_quad_seq) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 460 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 461 column 106 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE\n h_LE auth_quad_seq) alg key 0)))))))))))))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks (alg:algorithm) 
(offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit ->\n Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\\n movbe_enabled /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0)\n (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64\n (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n 
(va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n 
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)))) /\\ (forall (va_x_mem:vale_heap)\n (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)\n (va_x_rsi:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)\n (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)\n (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)\n (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)\n (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)\n (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)\n (va_x_heap3:vale_heap) (va_x_heap5:vale_heap) (va_x_heap6:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 6\n va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 3 va_x_heap3\n (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15\n (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11\n va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8\n (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4\n (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0\n (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13\n (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10\n (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64\n rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx\n va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem\n va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\\ (let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ 
Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = va_if\n (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) (fun _ ->\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM)\n inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) in let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128)\n `op_Multiply` 128 `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 +\n len128) in Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let\n (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64\n rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits\n auth_input_bytes in let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32\n padded_auth_bytes in let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n (FStar.Seq.Base.append #quad32 auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0)\n in128x6_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat)\n = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) 
`op_Multiply` 128\n `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) (fun\n _ -> raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n raw_quad_seq (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks : alg:algorithm -> offset:int -> auth_b:buffer128 -> abytes_b:buffer128\n -> in128x6_b:buffer128 -> out128x6_b:buffer128 -> in128_b:buffer128 -> out128_b:buffer128 ->\n inout_b:buffer128 -> iv_b:buffer128 -> scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq\n quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit ->\n Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b\n in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks alg offset) ([va_Mod_flags;\n va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2;\n va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm\n 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15;\n va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10;\n va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0\n va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b\n iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks (va_code_Gcm_blocks alg offset) va_s0 alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM 
(va_update_ok va_sM (va_update_mem va_sM\n va_s0))))))))))))))))))))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks (alg:algorithm) (offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit (va_code_Gcm_blocks alg\n offset)) =\n (va_QProc (va_code_Gcm_blocks alg offset) ([va_Mod_flags; va_Mod_mem_heaplet 6;\n va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;\n va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;\n va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64\n rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64\n rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64\n rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b) (va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b))\n#pop-options\n//--\n//-- Gcm_blocks_wrapped\n#push-options \"--z3rlimit 60\"\n\nval va_code_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> Tot va_code\n\nPartial implementation:\nlet va_code_Gcm_blocks_wrapped alg offset =\n \n"
},
{
"role": "assistant",
"content": "(va_Block (va_CCons (va_code_Gcm_blocks alg offset)\n (va_CCons (va_Block (va_CNil ())) (va_CCons (va_Block (va_CNil ())) (va_CNil ())))))"
}
] |
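The row above follows the standard Vale/F* pattern: each procedure `P` comes with a `va_code_P` AST assembled from `va_Block`/`va_CCons`/`va_CNil`, a `va_codegen_success_P` check folded over the same spine with `va_pbool_and`/`va_ttrue`, and the `va_lemma_P`/`va_wp_P`/`va_wpProof_P`/`va_quick_P` plumbing that carries the proof. The sketch below is not the Vale library; it is a self-contained toy (hypothetical module `CodeSuccessSketch` with a made-up `code` type) intended only to show how the code constructor and its success check mirror each other's structure.

```fstar
module CodeSuccessSketch

(* Toy stand-ins for va_code / va_pbool: a tiny instruction AST and a success
   check that recurses over exactly the structure the constructor builds. *)
type code =
  | Ins  : string -> code        (* a single named instruction              *)
  | Seq  : code -> code -> code  (* sequencing, in the role of va_CCons     *)
  | Skip : code                  (* the empty block, in the role of va_CNil *)

let rec codegen_success (c:code) : bool =
  match c with
  | Ins _   -> true                                    (* per-instruction check *)
  | Seq a b -> codegen_success a && codegen_success b  (* va_pbool_and          *)
  | Skip    -> true                                    (* va_ttrue              *)

(* The wrapped procedure above, in miniature: one inner procedure followed by
   two empty sub-blocks; its success check is the same fold over that shape. *)
let code_wrapped (inner:code) : code = Seq inner (Seq Skip (Seq Skip Skip))
let success_wrapped (inner:code) : bool = codegen_success (code_wrapped inner)
```

Keeping the two definitions structurally parallel is what lets the quick-code framework relate them mechanically; the real `va_pbool` additionally carries failure information rather than being a bare `bool`.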
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: a: Type -> x: a -> st: Type0 -> DM4F_layered5.repr a st (DM4F_layered5.return_wp x)\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nPartial implementation:\nlet return (a: Type) (x: a) (st: Type0) : repr a st (return_wp x) =\n \n"
},
{
"role": "assistant",
"content": "fun s0 -> (x, s0)"
}
] |
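The fun s0 -> (x, s0) completion above is the ordinary state-monad return, specialised in DM4F_layered5 to a repr indexed by return_wp and layered over ID. A minimal sketch of the unindexed version, with hypothetical names (st_repr, st_return, st_bind, get_and_incr) that are not part of the dataset:

module StateMonadSketch

(* plain state monad: a computation takes the initial state and
   returns a result paired with the final state *)
let st_repr (a:Type) (st:Type0) = st -> a & st

(* return threads the state through unchanged *)
let st_return (#a:Type) (#st:Type0) (x:a) : st_repr a st =
  fun s0 -> (x, s0)

(* bind runs m, then feeds its result and the updated state to f *)
let st_bind (#a:Type) (#b:Type) (#st:Type0)
    (m:st_repr a st) (f:a -> st_repr b st) : st_repr b st =
  fun s0 -> let (x, s1) = m s0 in f x s1

(* usage example: read the counter and increment it *)
let get_and_incr : st_repr int int =
  fun s0 -> (s0, s0 + 1)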
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: alg: Vale.AES.AES_common_s.algorithm -> offset: Prims.int -> Vale.X64.Decls.va_pbool\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file 
/home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq 
quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 
rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b 
inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg 
inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n (va_QProc (va_code_Gcm_extra_bytes alg) ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE) (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE))\n//--\n//-- Gcm_blocks128\n\nval va_code_Gcm_blocks128 : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks128 alg =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_CCons\n (va_code_Ghash_buffer ()) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRbx)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR12)) (va_CCons (va_code_Gctr_blocks128 alg) (va_CNil ())))))))))\n\nval va_codegen_success_Gcm_blocks128 : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks128 alg =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Ghash_buffer ()) 
(va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_pbool_and\n (va_codegen_success_Gctr_blocks128 alg) (va_ttrue ()))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks128 (va_mods:va_mods_t) (alg:algorithm) (in_b:buffer128) (out_b:buffer128)\n (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 274 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 275 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 276 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 277 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_buffer hkeys_b in_b h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_old_s))) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 278 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 279 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 280 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_QEmpty (()))))))))))\n\n\nval va_lemma_Gcm_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->\n out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks128 alg) va_s0 /\\ va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < 
pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) /\\ va_state_eq va_sM (va_update_flags va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 10 va_sM (va_update_xmm 11 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR11 va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks128 va_b0 va_s0 alg in_b out_b key round_keys keys_b hkeys_b h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks128 va_mods alg in_b out_b key round_keys keys_b hkeys_b h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks128 alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 210 
column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (label va_range1\n \"***** POSTCONDITION NOT MET AT line 255 column 53 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 261 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 262 column 45 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx\n va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 265 column 93 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) out_b)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 267 column 131 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 > 0 ==> l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n FStar.Seq.Base.length #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) (va_s0:va_state)\n (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax 
va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret) /\\ (forall (va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_rdi:nat64) (va_x_r11:nat64)\n (va_x_r10:nat64) (va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32)\n (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl\n (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11\n (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5\n (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1\n (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64\n rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbx\n va_x_rbx (va_upd_mem va_x_mem va_s0))))))))))))))))))) in va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq\n nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32 ->\n va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires 
(va_t_require va_s0 /\\ va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b\n h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks128 alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks128 (va_code_Gcm_blocks128 alg) va_s0 alg in_b out_b key\n round_keys keys_b hkeys_b h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6\n va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2\n va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR12 va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM (va_update_ok va_sM (va_update_mem\n va_sM va_s0))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) : (va_quickCode\n unit (va_code_Gcm_blocks128 alg)) =\n (va_QProc (va_code_Gcm_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;\n va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64\n rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem])\n (va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE)\n (va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE))\n//--\n//-- Gcm_blocks\n#push-options \"--z3rlimit 1000\"\n\nval va_code_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks alg offset =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64\n 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 0)) (va_CCons (va_code_Gcm_blocks_auth ()) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 24)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13))\n 
(va_CCons (va_code_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public) (va_CCons (va_code_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_CCons (va_code_Load_one_lsb\n (va_op_xmm_xmm 10)) (va_CCons (va_code_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1)\n (va_op_xmm_xmm 10)) (va_CCons (va_code_AES_GCM_decrypt_6mult alg) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64\n rRbp) 32 Secret) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdi)\n (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_CCons\n (va_code_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_CCons (va_code_Gcm_blocks128 alg)\n (va_CCons (va_code_Stack_lemma ()) (va_CCons (va_code_Add64 (va_op_dst_opr64_reg64 rR14)\n (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public)) (va_CCons (va_code_IMul64\n (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_CCons (va_IfElse\n (va_cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (va_Block (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (va_CCons\n (va_code_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_CCons\n (va_code_Gcm_extra_bytes alg) (va_CCons (va_Block (va_CNil ())) (va_CNil ()))))))) (va_Block\n (va_CNil ()))) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15))\n (va_CCons (va_code_Gcm_make_length_quad ()) (va_CCons (va_code_Ghash_register ()) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret) (va_CCons (va_code_Gctr_register alg) (va_CCons (va_Block (va_CNil ()))\n (va_CNil ()))))))))))))))))))))))))))))))))))))\n\nval va_codegen_success_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks alg offset =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9)\n (va_const_opr64 32)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64\n rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0)) (va_pbool_and\n (va_codegen_success_Gcm_blocks_auth ()) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp)\n (offset + 16)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx)\n (va_op_reg_opr64_reg64 rRsp) (offset + 24)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) 
(va_pbool_and (va_codegen_success_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64 rR8) 0 Public)\n (va_pbool_and (va_codegen_success_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_pbool_and\n (va_codegen_success_Load_one_lsb (va_op_xmm_xmm 10)) (va_pbool_and (va_codegen_success_VPaddd\n (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_pbool_and\n (va_codegen_success_AES_GCM_decrypt_6mult alg) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64 rRbp) 32 Secret)\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax)\n (va_op_reg_opr64_reg64 rRsp) (offset + 32)) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 48)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR14)\n (va_op_opr64_reg64 rRdx)) (va_pbool_and (va_codegen_success_InitPshufbMask (va_op_xmm_xmm 9)\n (va_op_reg_opr64_reg64 rR12)) (va_pbool_and (va_codegen_success_Pshufb (va_op_xmm_xmm 11)\n (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Gcm_blocks128 alg) (va_pbool_and\n (va_codegen_success_Stack_lemma ()) (va_pbool_and (va_codegen_success_Add64\n (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public))\n (va_pbool_and (va_codegen_success_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rR13)\n (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_pbool_and (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 56)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR10)\n (va_op_opr64_reg64 rR13)) (va_pbool_and (va_codegen_success_And64 (va_op_dst_opr64_reg64 rR10)\n (va_const_opr64 15)) (va_codegen_success_Gcm_extra_bytes alg)))) (va_pbool_and\n (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_pbool_and\n (va_codegen_success_Gcm_make_length_quad ()) (va_pbool_and (va_codegen_success_Ghash_register\n ()) (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64 rRbp) 0 Secret) (va_pbool_and\n (va_codegen_success_Gctr_register alg) (va_ttrue ()))))))))))))))))))))))))))))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks (va_mods:va_mods_t) (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit\n (va_code_Gcm_blocks alg offset)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 0) (va_get_stack va_s) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 8) (va_get_stack va_s) in let\n 
(out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 16) (va_get_stack va_s) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 24) (va_get_stack va_s) in let\n (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 32) (va_get_stack va_s) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 40) (va_get_stack va_s) in let\n (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset\n + 48) (va_get_stack va_s) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 56) (va_get_stack va_s) in let\n (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s\n + offset + 64) (va_get_stack va_s) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_old_s)) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 463 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 464 column 13 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 465 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 466 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE) (fun (va_s:va_state)\n (auth_quad_seq:(seq quad32)) -> let (y_0:quad32) = Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0 in let (y_auth_bytes:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 473 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 474 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 475 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 24))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 476 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) (va_QBind 
va_range1\n \"***** PRECONDITION NOT MET AT line 477 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> let\n (iv_BE:Vale.X64.Decls.quad32) = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2\n va_old_s) in let (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32\n iv_BE 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 483 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public iv_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 485 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_reg_opr64_reg64 rRbp)\n (va_op_xmm_xmm 1) 0 Secret scratch_b 0) (fun (va_s:va_state) _ -> let (j0:quad32) = va_get_xmm\n 1 va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 487 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load_one_lsb (va_op_xmm_xmm 10)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 489 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 491 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AES_GCM_decrypt_6mult alg h_LE iv_b in128x6_b out128x6_b scratch_b key round_keys\n keys_b hkeys_b) (fun (va_s:va_state) _ -> let (y_cipher128x6:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (auth_in:(seq quad32)) =\n auth_quad_seq in let (va_arg138:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in let\n (va_arg137:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg136:Vale.Def.Types_s.quad32) = y_auth_bytes in let (va_arg135:Vale.Def.Types_s.quad32) =\n y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 494 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg135 va_arg136\n y_cipher128x6 va_arg137 va_arg138) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 498 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11)\n (va_op_reg_opr64_reg64 rRbp) 32 Secret scratch_b 2) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 499 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 500 
column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 501 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 502 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 503 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 504 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 505 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 506 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks128 alg in128_b out128_b key round_keys keys_b hkeys_b h_LE) (fun\n (va_s:va_state) _ -> let (y_cipher128:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (va_arg134:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b in let\n (va_arg133:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg132:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 508 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg132 y_cipher128x6\n y_cipher128 va_arg133 va_arg134) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Stack_lemma (va_op_reg64_reg64 rRsp) (offset + 24) Public) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Add64 (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp)\n (offset + 24) Public)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 513 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_QBind va_range1\n \"***** PRECONDITION 
NOT MET AT line 514 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64))\n (fun (va_s:va_state) _ -> let (y_inout:Vale.Def.Types_s.quad32) = y_cipher128 in let\n (plain_byte_seq:(seq quad32)) = empty_seq_quad32 in let (cipher_byte_seq:(seq quad32)) =\n empty_seq_quad32 in let (va_arg131:Vale.Def.Types_s.quad32) = va_get_xmm 11 va_s in let\n (va_arg130:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg129:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg128:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg127:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 519 column 29 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_opaque_init va_arg127 va_arg128 va_arg129 va_arg130\n va_arg131) (let (total_bytes:(va_int_at_least 0)) = FStar.Seq.Base.length #quad32 auth_quad_seq\n `op_Multiply` 16 + plain_num_bytes in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 523 column 8 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_qIf va_mods (Cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (qblock va_mods (fun\n (va_s:va_state) -> va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 525 column 21 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 526 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (fun (va_s:va_state) _\n -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 527 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Poly1305.Math.lemma_poly_bits64 ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 528 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 532 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes y_0 auth_in\n h_LE) (fun (va_s:va_state) _ -> let y_inout = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm\n 8 va_s) in let (raw_auth_quads:(FStar.Seq.Base.seq quad32)) = FStar.Seq.Base.append #quad32\n auth_in (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b) in va_qAssertSquash\n va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 536 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun a_1906 (s_1907:(FStar.Seq.Base.seq a_1906)) (i_1908:Prims.nat) (j_1909:Prims.nat) -> let\n (j_1869:Prims.nat) = j_1909 in Prims.b2t (Prims.op_AmpAmp 
(Prims.op_LessThanOrEqual i_1908\n j_1869) (Prims.op_LessThanOrEqual j_1869 (FStar.Seq.Base.length #a_1906 s_1907))))\n Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 total_bytes)\n (fun _ -> let (auth_input_bytes:(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)) =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n raw_auth_quads) 0 total_bytes in let (padded_auth_bytes:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let auth_in =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let plain_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let cipher_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QEmpty ((auth_in,\n cipher_byte_seq, plain_byte_seq, y_inout)))))))))) (qblock va_mods (fun (va_s:va_state) ->\n va_QEmpty ((auth_in, cipher_byte_seq, plain_byte_seq, y_inout))))) (fun (va_s:va_state) va_g ->\n let ((auth_in:(seq quad32)), (cipher_byte_seq:(seq quad32)), (plain_byte_seq:(seq quad32)),\n (y_inout:Vale.Def.Types_s.quad32)) = va_g in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 547 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 548 column 25 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_make_length_quad ()) (fun (va_s:va_state) _ -> let\n (length_quad32:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 0\n va_s) in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 551 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_register hkeys_b h_LE y_inout) (fun (va_s:va_state) _ -> let\n (y_final:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s)\n in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 554 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret scratch_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 557 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_register alg key round_keys keys_b) (fun (va_s:va_state) _ -> let\n (va_arg126:Vale.Def.Types_s.quad32) = va_get_xmm 8 va_s in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 560 column 40 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Arch.Types.le_seq_quad32_to_bytes_of_singleton va_arg126)\n (va_qAssertSquash va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun (icb_BE_677:Vale.Def.Types_s.quad32) (plain_LE_678:Vale.Def.Types_s.quad32)\n (alg_679:Vale.AES.AES_common_s.algorithm) (key_680:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32))\n (i_681:Prims.int) -> Vale.AES.AES_s.is_aes_key_LE alg_679 key_680) j0 y_final alg key 0) (fun 
_\n -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_s == Vale.AES.GCTR_s.gctr_encrypt_block j0 y_final alg key 0) (let\n (plain128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let\n (cipher128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) in128_b) in va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 566 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 plain_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 plain128 plain_byte_seq)\n plain128) (va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 567 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 cipher_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 cipher128 cipher_byte_seq)\n cipher128) (let (va_arg125:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite ctr_BE_2 len128x6\n in let (va_arg124:Vale.Def.Types_s.quad32) = ctr_BE_2 in let (va_arg123:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat32)) = key in let (va_arg122:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32))\n = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg121:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg120:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b in let\n (va_arg119:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_old_s) in128x6_b in let (va_arg118:Prims.nat) = len128 in let\n (va_arg117:Prims.nat) = len128x6 in let (va_arg116:Vale.AES.AES_common_s.algorithm) = alg in\n va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 569 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg116 va_arg117 va_arg118\n va_arg119 va_arg120 va_arg121 va_arg122 va_arg123 va_arg124 va_arg125) (let\n (va_arg115:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite (Vale.AES.GCTR.inc32lite ctr_BE_2\n len128x6) len128 in let (va_arg114:Vale.Def.Types_s.quad32) = ctr_BE_2 in let\n (va_arg113:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg112:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg111:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg110:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b) in let\n (va_arg109:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) 
in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let (va_arg108:Prims.nat) =\n FStar.Seq.Base.length #quad32 plain_byte_seq in let (va_arg107:Prims.nat) = len128x6 + len128\n in let (va_arg106:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 575 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg106 va_arg107 va_arg108\n va_arg109 va_arg110 va_arg111 va_arg112 va_arg113 va_arg114 va_arg115) (let\n (va_arg105:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg104:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 583 column 23 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_hash_append2 h_LE va_arg104 y_inout y_final va_arg105\n length_quad32) (let auth_in = FStar.Seq.Base.append #quad32 auth_in (FStar.Seq.Base.create\n #Vale.Def.Types_s.quad32 1 length_quad32) in let (va_arg103:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = auth_in in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 585 column 31 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.ghash_incremental_to_ghash h_LE va_arg103) (va_QEmpty\n (()))))))))))))))))))))))))))))))))))))))))))))))))\n\n\nval va_lemma_Gcm_blocks : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> offset:int ->\n auth_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->\n in128_b:buffer128 -> out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 ->\n scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks alg offset) va_s0 /\\ va_get_ok va_s0 /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\ movbe_enabled /\\\n 
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n 
/\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))))))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) 
(va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) else FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = (if (plain_num_bytes\n > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) else\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM)\n out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) in let\n (cipher_bound:nat) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division`\n 8) then (len128x6 + len128 + 1) else (len128x6 + len128)) in Vale.AES.GCTR.gctr_partial alg\n cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let (length_quad:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let\n (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let\n (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let\n (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat) =\n FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then (let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) else\n raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) /\\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM\n va_s0)))))))))))))))))))))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks va_b0 va_s0 alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;\n va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14;\n va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;\n va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;\n va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx;\n va_Mod_reg64 rRax; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks va_mods alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks alg offset) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 283 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 
(va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 396 column 56 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 397 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 398 column 57 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 399 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 400 column 58 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet\n 6 va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 403 column 39 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (plain_num_bytes < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 404 column 38 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRsi va_s0 < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 406 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 408 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (ctr_BE_1:quad32) = iv_BE in label va_range1\n \"***** POSTCONDITION NOT MET AT line 409 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let 
(ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 412 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 421 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_out:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 430 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 + len128) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 434 column 77 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 438 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply`\n plain_num_bytes) 0) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 440 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 444 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 445 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 446 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes\n in label va_range1\n \"***** POSTCONDITION NOT MET AT line 448 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 452 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (total_bytes:nat) = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 +\n plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 453 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply`\n 128 `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32\n raw_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let\n (pb:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32\n pb) (fun _ -> raw_quad_seq) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 460 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 461 column 106 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE\n h_LE auth_quad_seq) alg key 0)))))))))))))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks (alg:algorithm) 
(offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit ->\n Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\\n movbe_enabled /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0)\n (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64\n (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n 
(va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n 
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)))) /\\ (forall (va_x_mem:vale_heap)\n (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)\n (va_x_rsi:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)\n (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)\n (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)\n (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)\n (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)\n (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)\n (va_x_heap3:vale_heap) (va_x_heap5:vale_heap) (va_x_heap6:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 6\n va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 3 va_x_heap3\n (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15\n (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11\n va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8\n (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4\n (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0\n (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13\n (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10\n (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64\n rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx\n va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem\n va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\\ (let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ 
Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = va_if\n (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) (fun _ ->\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM)\n inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) in let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128)\n `op_Multiply` 128 `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 +\n len128) in Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let\n (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64\n rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits\n auth_input_bytes in let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32\n padded_auth_bytes in let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n (FStar.Seq.Base.append #quad32 auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0)\n in128x6_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat)\n = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) 
`op_Multiply` 128\n `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) (fun\n _ -> raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n raw_quad_seq (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks : alg:algorithm -> offset:int -> auth_b:buffer128 -> abytes_b:buffer128\n -> in128x6_b:buffer128 -> out128x6_b:buffer128 -> in128_b:buffer128 -> out128_b:buffer128 ->\n inout_b:buffer128 -> iv_b:buffer128 -> scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq\n quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit ->\n Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b\n in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks alg offset) ([va_Mod_flags;\n va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2;\n va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm\n 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15;\n va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10;\n va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0\n va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b\n iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks (va_code_Gcm_blocks alg offset) va_s0 alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM 
(va_update_ok va_sM (va_update_mem va_sM\n va_s0))))))))))))))))))))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks (alg:algorithm) (offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit (va_code_Gcm_blocks alg\n offset)) =\n (va_QProc (va_code_Gcm_blocks alg offset) ([va_Mod_flags; va_Mod_mem_heaplet 6;\n va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;\n va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;\n va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64\n rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64\n rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64\n rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b) (va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b))\n#pop-options\n//--\n//-- Gcm_blocks_wrapped\n#push-options \"--z3rlimit 60\"\n\nval va_code_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks_wrapped alg offset =\n (va_Block (va_CCons (va_code_Gcm_blocks alg offset) (va_CCons (va_Block (va_CNil ())) (va_CCons\n (va_Block (va_CNil ())) (va_CNil ())))))\n\nval va_codegen_success_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> Tot va_pbool\n\nPartial implementation:\nlet va_codegen_success_Gcm_blocks_wrapped alg offset =\n \n"
},
{
"role": "assistant",
"content": "(va_pbool_and (va_codegen_success_Gcm_blocks alg offset) (va_ttrue ()))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: b: Prims.bool -> w1: DM4F_layered5.wp st a -> w2: DM4F_layered5.wp st a -> DM4F_layered5.wp st a\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nPartial implementation:\nlet ite_wp #a #st (b: bool) (w1: wp st a) (w2: wp st a) : wp st a =\n \n"
},
{
"role": "assistant",
"content": "fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: w1: DM4F_layered5.wp st a -> w2: (_: a -> DM4F_layered5.wp st b) -> DM4F_layered5.wp st b\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nPartial implementation:\nlet bind_wp (#a #b: Type) (#st: Type0) (w1: wp st a) (w2: (a -> wp st b)) : wp st b =\n \n"
},
{
"role": "assistant",
"content": "fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: c: Pulse.Syntax.Base.comp_st -> post: Pulse.Syntax.Base.term\n -> c':\n Pulse.Syntax.Base.comp_st\n { Pulse.Syntax.Base.st_comp_of_comp c' ==\n (let _ = Pulse.Syntax.Base.st_comp_of_comp c in\n Pulse.Syntax.Base.Mkst_comp (Mkst_comp?.u _) (Mkst_comp?.res _) (Mkst_comp?.pre _) post) }\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) 
(e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing 
}\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nPartial implementation:\nlet comp_st_with_post (c: comp_st) (post: term)\n : c': comp_st{st_comp_of_comp c' == ({ st_comp_of_comp c with post = post } <: st_comp)} =\n \n"
},
{
"role": "assistant",
"content": "match c with\n| C_ST st -> C_ST ({ st with post = post })\n| C_STGhost st -> C_STGhost ({ st with post = post })\n| C_STAtomic i obs st -> C_STAtomic i obs ({ st with post = post })"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: w: ID5.wp a -> DM4F_layered5.wp st a\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nPartial implementation:\nlet lift_id_st_wp #a #st (w: ID5.wp a) : wp st a =\n \n"
},
{
"role": "assistant",
"content": "elim_pure_wp_monotonicity_forall ();\nfun s0 p -> w (fun x -> p x s0)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: DM4F_layered5.wp st a\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nlet lift_id_st a wp st (f : ID5.repr a wp)\n : repr a st (lift_id_st_wp wp)\n = fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))\n\nsub_effect ID ~> ST = lift_id_st\n\nPartial implementation:\nlet null #st #a : wp st a =\n \n"
},
{
"role": "assistant",
"content": "fun s0 p -> forall x s1. p x s1"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n a: Type ->\n b: Type ->\n st: Type0 ->\n wp_c: DM4F_layered5.wp st a ->\n wp_f: (_: a -> DM4F_layered5.wp st b) ->\n c: DM4F_layered5.repr a st wp_c ->\n f: (x: a -> DM4F_layered5.repr b st (wp_f x))\n -> DM4F_layered5.repr b st (DM4F_layered5.bind_wp wp_c wp_f)\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nPartial implementation:\nlet bind\n (a b: Type)\n (st: Type0)\n (wp_c: wp st a)\n (wp_f: (a -> wp st b))\n (c: repr a st wp_c)\n (f: (x: a -> repr b st (wp_f x)))\n : repr b st (bind_wp wp_c wp_f) =\n \n"
},
{
"role": "assistant",
"content": "fun s0 ->\n let r = c s0 in\n f (fst r) (snd r)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n a: Type ->\n st: Type0 ->\n wpf: DM4F_layered5.wp st a ->\n wpg: DM4F_layered5.wp st a ->\n f: DM4F_layered5.repr a st wpf\n -> DM4F_layered5.repr a st wpg\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nPartial implementation:\nlet subcomp\n (a: Type)\n (st: Type0)\n (wpf wpg: wp st a)\n (#[@@@ refine]u: squash (stronger wpg wpf))\n (f: repr a st wpf)\n : repr a st wpg =\n \n"
},
{
"role": "assistant",
"content": "f"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: b: FStar.Bytes.bytes -> i: Prims.nat{i < FStar.Bytes.length b} -> FStar.Bytes.byte\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2017 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n(*\nA standard library for manipulation of value bytes.\n\nThis model is realized by Bytes.bytes in OCaml and by\nstruct {uintX_t size; char *bytes} (or similar) in C.\n\nThis file is essentially a specialized version of FStar.Seq,\nwith lemmas and refinements taylored for typical operations on\nbytes, and with support for machine integers and C-extractible versions\n(which Seq does not provide.)\n\n@summary Value bytes standard library\n*)\nmodule FStar.Bytes\n\nmodule S = FStar.Seq\nmodule U = FStar.UInt\nmodule U8 = FStar.UInt8\nmodule U16 = FStar.UInt16\nmodule U32 = FStar.UInt32\nmodule U64 = FStar.UInt64\nmodule Str = FStar.String\nmodule Chr = FStar.Char\n\nunfold let u8 = U8.t\nunfold let u16 = U16.t\nunfold let u32 = U32.t\n\n(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *)\nunfold type byte = u8\n\n(** Realized in C by a pair of a length field and uint8_t* in C\n Realized in OCaml by a string *)\nval bytes : t:Type0{hasEq t}\nval len : bytes -> u32\n\nunfold let length b = FStar.UInt32.v (len b)\n\n(** representation for specs that need lemmas not defined here. *)\nval reveal:\n bytes\n -> GTot (S.seq byte)\n\nval length_reveal:\n x:bytes\n -> Lemma (ensures (S.length (reveal x) = length x))\n [SMTPatOr [[SMTPat (S.length (reveal x))];\n [SMTPat (len x)]]]\n\nval hide:\n s:S.seq byte{S.length s < pow2 32}\n -> GTot bytes\n\nval hide_reveal:\n x:bytes\n -> Lemma (ensures (hide (reveal x) = x))\n [SMTPat (reveal x)]\n\nval reveal_hide:\n x:S.seq byte{S.length x < pow2 32}\n -> Lemma (ensures (reveal (hide x) == x))\n [SMTPat (hide x)]\n\ntype lbytes (l:nat) = b:bytes{length b = l}\ntype kbytes (k:nat) = b:bytes{length b < pow2 k}\n\nlet lbytes32 (l:UInt32.t) = b:bytes{len b = l}\n\nval empty_bytes : lbytes 0\nval empty_unique:\n b:bytes\n -> Lemma (length b = 0 ==> b = empty_bytes)\n [SMTPat (len b)]\n\n(** If you statically know the length, it is OK to read at arbitrary indexes *)\nval get:\n b:bytes\n -> pos:u32{U32.v pos < length b}\n -> Pure byte\n (requires True)\n (ensures (fun y -> y == S.index (reveal b) (U32.v pos)))\n\nunfold let op_String_Access = get\n\nPartial implementation:\nlet index (b: bytes) (i: nat{i < length b}) =\n \n"
},
{
"role": "assistant",
"content": "get b (U32.uint_to_t i)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: n: Prims.nat{FStar.UInt.size n FStar.UInt32.n} -> v: FStar.Bytes.byte\n -> b:\n FStar.Bytes.lbytes (FStar.UInt32.v (FStar.UInt32.uint_to_t n))\n {forall (i: FStar.Bytes.u32{i <^ FStar.UInt32.uint_to_t n}). {:pattern b.[ i ]} b.[ i ] == v}\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2017 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n(*\nA standard library for manipulation of value bytes.\n\nThis model is realized by Bytes.bytes in OCaml and by\nstruct {uintX_t size; char *bytes} (or similar) in C.\n\nThis file is essentially a specialized version of FStar.Seq,\nwith lemmas and refinements taylored for typical operations on\nbytes, and with support for machine integers and C-extractible versions\n(which Seq does not provide.)\n\n@summary Value bytes standard library\n*)\nmodule FStar.Bytes\n\nmodule S = FStar.Seq\nmodule U = FStar.UInt\nmodule U8 = FStar.UInt8\nmodule U16 = FStar.UInt16\nmodule U32 = FStar.UInt32\nmodule U64 = FStar.UInt64\nmodule Str = FStar.String\nmodule Chr = FStar.Char\n\nunfold let u8 = U8.t\nunfold let u16 = U16.t\nunfold let u32 = U32.t\n\n(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *)\nunfold type byte = u8\n\n(** Realized in C by a pair of a length field and uint8_t* in C\n Realized in OCaml by a string *)\nval bytes : t:Type0{hasEq t}\nval len : bytes -> u32\n\nunfold let length b = FStar.UInt32.v (len b)\n\n(** representation for specs that need lemmas not defined here. 
*)\nval reveal:\n bytes\n -> GTot (S.seq byte)\n\nval length_reveal:\n x:bytes\n -> Lemma (ensures (S.length (reveal x) = length x))\n [SMTPatOr [[SMTPat (S.length (reveal x))];\n [SMTPat (len x)]]]\n\nval hide:\n s:S.seq byte{S.length s < pow2 32}\n -> GTot bytes\n\nval hide_reveal:\n x:bytes\n -> Lemma (ensures (hide (reveal x) = x))\n [SMTPat (reveal x)]\n\nval reveal_hide:\n x:S.seq byte{S.length x < pow2 32}\n -> Lemma (ensures (reveal (hide x) == x))\n [SMTPat (hide x)]\n\ntype lbytes (l:nat) = b:bytes{length b = l}\ntype kbytes (k:nat) = b:bytes{length b < pow2 k}\n\nlet lbytes32 (l:UInt32.t) = b:bytes{len b = l}\n\nval empty_bytes : lbytes 0\nval empty_unique:\n b:bytes\n -> Lemma (length b = 0 ==> b = empty_bytes)\n [SMTPat (len b)]\n\n(** If you statically know the length, it is OK to read at arbitrary indexes *)\nval get:\n b:bytes\n -> pos:u32{U32.v pos < length b}\n -> Pure byte\n (requires True)\n (ensures (fun y -> y == S.index (reveal b) (U32.v pos)))\n\nunfold let op_String_Access = get\n\nunfold let index (b:bytes) (i:nat{i < length b}) = get b (U32.uint_to_t i)\n\nlet equal b1 b2 =\n length b1 = length b2 /\\\n (forall (i:u32{U32.v i < length b1}).{:pattern (b1.[i]); (b2.[i])} b1.[i] == b2.[i])\n\nval extensionality:\n b1:bytes\n -> b2:bytes\n -> Lemma (requires (equal b1 b2))\n (ensures (b1 = b2))\n\n(** creating byte values **)\nval create:\n len:u32\n -> v:byte\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == v}\n\nPartial implementation:\nlet create_ (n: nat{FStar.UInt.size n U32.n}) v =\n \n"
},
{
"role": "assistant",
"content": "create (U32.uint_to_t n) v"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: b: FStar.Bytes.bytes -> s: Prims.nat -> e: Prims.nat{s <= e /\\ e <= FStar.Bytes.length b}\n -> r:\n FStar.Bytes.bytes\n { FStar.Bytes.reveal r ==\n FStar.Seq.Base.slice (FStar.Bytes.reveal b)\n (FStar.UInt32.v (FStar.UInt32.uint_to_t s))\n (FStar.UInt32.v (FStar.UInt32.uint_to_t e)) }\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2017 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n(*\nA standard library for manipulation of value bytes.\n\nThis model is realized by Bytes.bytes in OCaml and by\nstruct {uintX_t size; char *bytes} (or similar) in C.\n\nThis file is essentially a specialized version of FStar.Seq,\nwith lemmas and refinements taylored for typical operations on\nbytes, and with support for machine integers and C-extractible versions\n(which Seq does not provide.)\n\n@summary Value bytes standard library\n*)\nmodule FStar.Bytes\n\nmodule S = FStar.Seq\nmodule U = FStar.UInt\nmodule U8 = FStar.UInt8\nmodule U16 = FStar.UInt16\nmodule U32 = FStar.UInt32\nmodule U64 = FStar.UInt64\nmodule Str = FStar.String\nmodule Chr = FStar.Char\n\nunfold let u8 = U8.t\nunfold let u16 = U16.t\nunfold let u32 = U32.t\n\n(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *)\nunfold type byte = u8\n\n(** Realized in C by a pair of a length field and uint8_t* in C\n Realized in OCaml by a string *)\nval bytes : t:Type0{hasEq t}\nval len : bytes -> u32\n\nunfold let length b = FStar.UInt32.v (len b)\n\n(** representation for specs that need lemmas not defined here. 
*)\nval reveal:\n bytes\n -> GTot (S.seq byte)\n\nval length_reveal:\n x:bytes\n -> Lemma (ensures (S.length (reveal x) = length x))\n [SMTPatOr [[SMTPat (S.length (reveal x))];\n [SMTPat (len x)]]]\n\nval hide:\n s:S.seq byte{S.length s < pow2 32}\n -> GTot bytes\n\nval hide_reveal:\n x:bytes\n -> Lemma (ensures (hide (reveal x) = x))\n [SMTPat (reveal x)]\n\nval reveal_hide:\n x:S.seq byte{S.length x < pow2 32}\n -> Lemma (ensures (reveal (hide x) == x))\n [SMTPat (hide x)]\n\ntype lbytes (l:nat) = b:bytes{length b = l}\ntype kbytes (k:nat) = b:bytes{length b < pow2 k}\n\nlet lbytes32 (l:UInt32.t) = b:bytes{len b = l}\n\nval empty_bytes : lbytes 0\nval empty_unique:\n b:bytes\n -> Lemma (length b = 0 ==> b = empty_bytes)\n [SMTPat (len b)]\n\n(** If you statically know the length, it is OK to read at arbitrary indexes *)\nval get:\n b:bytes\n -> pos:u32{U32.v pos < length b}\n -> Pure byte\n (requires True)\n (ensures (fun y -> y == S.index (reveal b) (U32.v pos)))\n\nunfold let op_String_Access = get\n\nunfold let index (b:bytes) (i:nat{i < length b}) = get b (U32.uint_to_t i)\n\nlet equal b1 b2 =\n length b1 = length b2 /\\\n (forall (i:u32{U32.v i < length b1}).{:pattern (b1.[i]); (b2.[i])} b1.[i] == b2.[i])\n\nval extensionality:\n b1:bytes\n -> b2:bytes\n -> Lemma (requires (equal b1 b2))\n (ensures (b1 = b2))\n\n(** creating byte values **)\nval create:\n len:u32\n -> v:byte\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == v}\n\nunfold\nlet create_ (n:nat{FStar.UInt.size n U32.n}) v = create (U32.uint_to_t n) v\n\nval init:\n len:u32\n -> f:(i:u32{U32.(i <^ len)} -> byte)\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == f i}\n\n// this is a hack JROESCH\nval abyte (b:byte) : lbytes 1\n (* admit () create 1ul b *)\n\nval twobytes (b:byte*byte) : lbytes 2\n // init 2ul (fun i -> if i = 0ul then fst b else snd b)\n\n(** appending bytes **)\nval append:\n b1:bytes\n -> b2:bytes\n -> Pure bytes\n (requires (UInt.size (length b1 + length b2) U32.n))\n (ensures (fun b -> reveal b == S.append (reveal b1) (reveal b2)))\nunfold let op_At_Bar = append\n\nval slice:\n b:bytes\n -> s:u32\n -> e:u32{U32.(s <=^ e) /\\ U32.v e <= length b}\n\nPartial implementation:\nlet slice_ b (s: nat) (e: nat{s <= e /\\ e <= length b}) =\n \n"
},
{
"role": "assistant",
"content": "slice b (U32.uint_to_t s) (U32.uint_to_t e)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n k0: Pulse.Checker.Base.continuation_elaborator g0 ctxt0 g1 ctxt1 ->\n k1:\n Pulse.Checker.Base.continuation_elaborator g1 ctxt1 g2 ctxt2\n {Pulse.Typing.Env.env_extends g1 g0}\n -> Pulse.Checker.Base.continuation_elaborator g0 ctxt0 g2 ctxt2\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n 
match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : 
continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nPartial implementation:\nlet k_elab_trans\n (#g0: env)\n (#g1: env{g1 `env_extends` g0})\n (#g2: env{g2 `env_extends` g1})\n (#ctxt0 #ctxt1 #ctxt2: term)\n (k0: continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1: continuation_elaborator g1 ctxt1 g2 ctxt2 {g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2 =\n \n"
},
{
"role": "assistant",
"content": "fun post_hint res -> k0 post_hint (k1 post_hint res)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: win: Prims.bool -> alg: Vale.AES.AES_common_s.algorithm -> Vale.X64.Decls.va_code\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file 
/home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq 
quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 
rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b 
inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg 
inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n (va_QProc (va_code_Gcm_extra_bytes alg) ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE) (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE))\n//--\n//-- Gcm_blocks128\n\nval va_code_Gcm_blocks128 : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks128 alg =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_CCons\n (va_code_Ghash_buffer ()) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRbx)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR12)) (va_CCons (va_code_Gctr_blocks128 alg) (va_CNil ())))))))))\n\nval va_codegen_success_Gcm_blocks128 : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks128 alg =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Ghash_buffer ()) 
(va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_pbool_and\n (va_codegen_success_Gctr_blocks128 alg) (va_ttrue ()))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks128 (va_mods:va_mods_t) (alg:algorithm) (in_b:buffer128) (out_b:buffer128)\n (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 274 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 275 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 276 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 277 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_buffer hkeys_b in_b h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_old_s))) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 278 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 279 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 280 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_QEmpty (()))))))))))\n\n\nval va_lemma_Gcm_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->\n out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks128 alg) va_s0 /\\ va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < 
pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) /\\ va_state_eq va_sM (va_update_flags va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 10 va_sM (va_update_xmm 11 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR11 va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks128 va_b0 va_s0 alg in_b out_b key round_keys keys_b hkeys_b h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks128 va_mods alg in_b out_b key round_keys keys_b hkeys_b h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks128 alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 210 
column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (label va_range1\n \"***** POSTCONDITION NOT MET AT line 255 column 53 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 261 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 262 column 45 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx\n va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 265 column 93 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) out_b)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 267 column 131 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 > 0 ==> l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n FStar.Seq.Base.length #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) (va_s0:va_state)\n (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax 
va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret) /\\ (forall (va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_rdi:nat64) (va_x_r11:nat64)\n (va_x_r10:nat64) (va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32)\n (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl\n (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11\n (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5\n (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1\n (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64\n rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbx\n va_x_rbx (va_upd_mem va_x_mem va_s0))))))))))))))))))) in va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq\n nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32 ->\n va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires 
(va_t_require va_s0 /\\ va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b\n h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks128 alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks128 (va_code_Gcm_blocks128 alg) va_s0 alg in_b out_b key\n round_keys keys_b hkeys_b h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6\n va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2\n va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR12 va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM (va_update_ok va_sM (va_update_mem\n va_sM va_s0))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) : (va_quickCode\n unit (va_code_Gcm_blocks128 alg)) =\n (va_QProc (va_code_Gcm_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;\n va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64\n rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem])\n (va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE)\n (va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE))\n//--\n//-- Gcm_blocks\n#push-options \"--z3rlimit 1000\"\n\nval va_code_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks alg offset =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64\n 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 0)) (va_CCons (va_code_Gcm_blocks_auth ()) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 24)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13))\n 
(va_CCons (va_code_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public) (va_CCons (va_code_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_CCons (va_code_Load_one_lsb\n (va_op_xmm_xmm 10)) (va_CCons (va_code_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1)\n (va_op_xmm_xmm 10)) (va_CCons (va_code_AES_GCM_decrypt_6mult alg) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64\n rRbp) 32 Secret) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdi)\n (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_CCons\n (va_code_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_CCons (va_code_Gcm_blocks128 alg)\n (va_CCons (va_code_Stack_lemma ()) (va_CCons (va_code_Add64 (va_op_dst_opr64_reg64 rR14)\n (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public)) (va_CCons (va_code_IMul64\n (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_CCons (va_IfElse\n (va_cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (va_Block (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (va_CCons\n (va_code_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_CCons\n (va_code_Gcm_extra_bytes alg) (va_CCons (va_Block (va_CNil ())) (va_CNil ()))))))) (va_Block\n (va_CNil ()))) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15))\n (va_CCons (va_code_Gcm_make_length_quad ()) (va_CCons (va_code_Ghash_register ()) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret) (va_CCons (va_code_Gctr_register alg) (va_CCons (va_Block (va_CNil ()))\n (va_CNil ()))))))))))))))))))))))))))))))))))))\n\nval va_codegen_success_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks alg offset =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9)\n (va_const_opr64 32)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64\n rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0)) (va_pbool_and\n (va_codegen_success_Gcm_blocks_auth ()) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp)\n (offset + 16)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx)\n (va_op_reg_opr64_reg64 rRsp) (offset + 24)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) 
(va_pbool_and (va_codegen_success_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64 rR8) 0 Public)\n (va_pbool_and (va_codegen_success_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_pbool_and\n (va_codegen_success_Load_one_lsb (va_op_xmm_xmm 10)) (va_pbool_and (va_codegen_success_VPaddd\n (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_pbool_and\n (va_codegen_success_AES_GCM_decrypt_6mult alg) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64 rRbp) 32 Secret)\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax)\n (va_op_reg_opr64_reg64 rRsp) (offset + 32)) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 48)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR14)\n (va_op_opr64_reg64 rRdx)) (va_pbool_and (va_codegen_success_InitPshufbMask (va_op_xmm_xmm 9)\n (va_op_reg_opr64_reg64 rR12)) (va_pbool_and (va_codegen_success_Pshufb (va_op_xmm_xmm 11)\n (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Gcm_blocks128 alg) (va_pbool_and\n (va_codegen_success_Stack_lemma ()) (va_pbool_and (va_codegen_success_Add64\n (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public))\n (va_pbool_and (va_codegen_success_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rR13)\n (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_pbool_and (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 56)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR10)\n (va_op_opr64_reg64 rR13)) (va_pbool_and (va_codegen_success_And64 (va_op_dst_opr64_reg64 rR10)\n (va_const_opr64 15)) (va_codegen_success_Gcm_extra_bytes alg)))) (va_pbool_and\n (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_pbool_and\n (va_codegen_success_Gcm_make_length_quad ()) (va_pbool_and (va_codegen_success_Ghash_register\n ()) (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64 rRbp) 0 Secret) (va_pbool_and\n (va_codegen_success_Gctr_register alg) (va_ttrue ()))))))))))))))))))))))))))))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks (va_mods:va_mods_t) (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit\n (va_code_Gcm_blocks alg offset)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 0) (va_get_stack va_s) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 8) (va_get_stack va_s) in let\n 
(out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 16) (va_get_stack va_s) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 24) (va_get_stack va_s) in let\n (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 32) (va_get_stack va_s) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 40) (va_get_stack va_s) in let\n (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset\n + 48) (va_get_stack va_s) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 56) (va_get_stack va_s) in let\n (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s\n + offset + 64) (va_get_stack va_s) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_old_s)) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 463 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 464 column 13 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 465 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 466 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE) (fun (va_s:va_state)\n (auth_quad_seq:(seq quad32)) -> let (y_0:quad32) = Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0 in let (y_auth_bytes:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 473 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 474 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 475 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 24))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 476 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) (va_QBind 
va_range1\n \"***** PRECONDITION NOT MET AT line 477 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> let\n (iv_BE:Vale.X64.Decls.quad32) = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2\n va_old_s) in let (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32\n iv_BE 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 483 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public iv_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 485 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_reg_opr64_reg64 rRbp)\n (va_op_xmm_xmm 1) 0 Secret scratch_b 0) (fun (va_s:va_state) _ -> let (j0:quad32) = va_get_xmm\n 1 va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 487 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load_one_lsb (va_op_xmm_xmm 10)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 489 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 491 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AES_GCM_decrypt_6mult alg h_LE iv_b in128x6_b out128x6_b scratch_b key round_keys\n keys_b hkeys_b) (fun (va_s:va_state) _ -> let (y_cipher128x6:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (auth_in:(seq quad32)) =\n auth_quad_seq in let (va_arg138:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in let\n (va_arg137:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg136:Vale.Def.Types_s.quad32) = y_auth_bytes in let (va_arg135:Vale.Def.Types_s.quad32) =\n y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 494 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg135 va_arg136\n y_cipher128x6 va_arg137 va_arg138) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 498 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11)\n (va_op_reg_opr64_reg64 rRbp) 32 Secret scratch_b 2) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 499 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 500 
column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 501 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 502 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 503 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 504 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 505 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 506 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks128 alg in128_b out128_b key round_keys keys_b hkeys_b h_LE) (fun\n (va_s:va_state) _ -> let (y_cipher128:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (va_arg134:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b in let\n (va_arg133:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg132:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 508 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg132 y_cipher128x6\n y_cipher128 va_arg133 va_arg134) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Stack_lemma (va_op_reg64_reg64 rRsp) (offset + 24) Public) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Add64 (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp)\n (offset + 24) Public)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 513 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_QBind va_range1\n \"***** PRECONDITION 
NOT MET AT line 514 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64))\n (fun (va_s:va_state) _ -> let (y_inout:Vale.Def.Types_s.quad32) = y_cipher128 in let\n (plain_byte_seq:(seq quad32)) = empty_seq_quad32 in let (cipher_byte_seq:(seq quad32)) =\n empty_seq_quad32 in let (va_arg131:Vale.Def.Types_s.quad32) = va_get_xmm 11 va_s in let\n (va_arg130:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg129:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg128:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg127:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 519 column 29 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_opaque_init va_arg127 va_arg128 va_arg129 va_arg130\n va_arg131) (let (total_bytes:(va_int_at_least 0)) = FStar.Seq.Base.length #quad32 auth_quad_seq\n `op_Multiply` 16 + plain_num_bytes in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 523 column 8 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_qIf va_mods (Cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (qblock va_mods (fun\n (va_s:va_state) -> va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 525 column 21 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 526 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (fun (va_s:va_state) _\n -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 527 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Poly1305.Math.lemma_poly_bits64 ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 528 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 532 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes y_0 auth_in\n h_LE) (fun (va_s:va_state) _ -> let y_inout = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm\n 8 va_s) in let (raw_auth_quads:(FStar.Seq.Base.seq quad32)) = FStar.Seq.Base.append #quad32\n auth_in (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b) in va_qAssertSquash\n va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 536 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun a_1906 (s_1907:(FStar.Seq.Base.seq a_1906)) (i_1908:Prims.nat) (j_1909:Prims.nat) -> let\n (j_1869:Prims.nat) = j_1909 in Prims.b2t (Prims.op_AmpAmp 
(Prims.op_LessThanOrEqual i_1908\n j_1869) (Prims.op_LessThanOrEqual j_1869 (FStar.Seq.Base.length #a_1906 s_1907))))\n Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 total_bytes)\n (fun _ -> let (auth_input_bytes:(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)) =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n raw_auth_quads) 0 total_bytes in let (padded_auth_bytes:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let auth_in =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let plain_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let cipher_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QEmpty ((auth_in,\n cipher_byte_seq, plain_byte_seq, y_inout)))))))))) (qblock va_mods (fun (va_s:va_state) ->\n va_QEmpty ((auth_in, cipher_byte_seq, plain_byte_seq, y_inout))))) (fun (va_s:va_state) va_g ->\n let ((auth_in:(seq quad32)), (cipher_byte_seq:(seq quad32)), (plain_byte_seq:(seq quad32)),\n (y_inout:Vale.Def.Types_s.quad32)) = va_g in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 547 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 548 column 25 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_make_length_quad ()) (fun (va_s:va_state) _ -> let\n (length_quad32:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 0\n va_s) in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 551 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_register hkeys_b h_LE y_inout) (fun (va_s:va_state) _ -> let\n (y_final:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s)\n in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 554 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret scratch_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 557 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_register alg key round_keys keys_b) (fun (va_s:va_state) _ -> let\n (va_arg126:Vale.Def.Types_s.quad32) = va_get_xmm 8 va_s in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 560 column 40 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Arch.Types.le_seq_quad32_to_bytes_of_singleton va_arg126)\n (va_qAssertSquash va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun (icb_BE_677:Vale.Def.Types_s.quad32) (plain_LE_678:Vale.Def.Types_s.quad32)\n (alg_679:Vale.AES.AES_common_s.algorithm) (key_680:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32))\n (i_681:Prims.int) -> Vale.AES.AES_s.is_aes_key_LE alg_679 key_680) j0 y_final alg key 0) (fun 
_\n -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_s == Vale.AES.GCTR_s.gctr_encrypt_block j0 y_final alg key 0) (let\n (plain128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let\n (cipher128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) in128_b) in va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 566 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 plain_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 plain128 plain_byte_seq)\n plain128) (va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 567 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 cipher_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 cipher128 cipher_byte_seq)\n cipher128) (let (va_arg125:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite ctr_BE_2 len128x6\n in let (va_arg124:Vale.Def.Types_s.quad32) = ctr_BE_2 in let (va_arg123:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat32)) = key in let (va_arg122:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32))\n = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg121:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg120:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b in let\n (va_arg119:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_old_s) in128x6_b in let (va_arg118:Prims.nat) = len128 in let\n (va_arg117:Prims.nat) = len128x6 in let (va_arg116:Vale.AES.AES_common_s.algorithm) = alg in\n va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 569 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg116 va_arg117 va_arg118\n va_arg119 va_arg120 va_arg121 va_arg122 va_arg123 va_arg124 va_arg125) (let\n (va_arg115:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite (Vale.AES.GCTR.inc32lite ctr_BE_2\n len128x6) len128 in let (va_arg114:Vale.Def.Types_s.quad32) = ctr_BE_2 in let\n (va_arg113:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg112:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg111:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg110:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b) in let\n (va_arg109:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) 
in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let (va_arg108:Prims.nat) =\n FStar.Seq.Base.length #quad32 plain_byte_seq in let (va_arg107:Prims.nat) = len128x6 + len128\n in let (va_arg106:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 575 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg106 va_arg107 va_arg108\n va_arg109 va_arg110 va_arg111 va_arg112 va_arg113 va_arg114 va_arg115) (let\n (va_arg105:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg104:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 583 column 23 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_hash_append2 h_LE va_arg104 y_inout y_final va_arg105\n length_quad32) (let auth_in = FStar.Seq.Base.append #quad32 auth_in (FStar.Seq.Base.create\n #Vale.Def.Types_s.quad32 1 length_quad32) in let (va_arg103:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = auth_in in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 585 column 31 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.ghash_incremental_to_ghash h_LE va_arg103) (va_QEmpty\n (()))))))))))))))))))))))))))))))))))))))))))))))))\n\n\nval va_lemma_Gcm_blocks : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> offset:int ->\n auth_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->\n in128_b:buffer128 -> out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 ->\n scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks alg offset) va_s0 /\\ va_get_ok va_s0 /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\ movbe_enabled /\\\n 
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n 
/\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))))))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) 
(va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) else FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = (if (plain_num_bytes\n > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) else\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM)\n out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) in let\n (cipher_bound:nat) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division`\n 8) then (len128x6 + len128 + 1) else (len128x6 + len128)) in Vale.AES.GCTR.gctr_partial alg\n cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let (length_quad:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let\n (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let\n (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let\n (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat) =\n FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then (let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) else\n raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) /\\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM\n va_s0)))))))))))))))))))))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks va_b0 va_s0 alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;\n va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14;\n va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;\n va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;\n va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx;\n va_Mod_reg64 rRax; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks va_mods alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks alg offset) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 283 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 
(va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 396 column 56 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 397 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 398 column 57 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 399 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 400 column 58 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet\n 6 va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 403 column 39 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (plain_num_bytes < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 404 column 38 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRsi va_s0 < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 406 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 408 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (ctr_BE_1:quad32) = iv_BE in label va_range1\n \"***** POSTCONDITION NOT MET AT line 409 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let 
(ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 412 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 421 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_out:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 430 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 + len128) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 434 column 77 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 438 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply`\n plain_num_bytes) 0) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 440 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 444 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 445 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 446 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes\n in label va_range1\n \"***** POSTCONDITION NOT MET AT line 448 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 452 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (total_bytes:nat) = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 +\n plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 453 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply`\n 128 `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32\n raw_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let\n (pb:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32\n pb) (fun _ -> raw_quad_seq) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 460 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 461 column 106 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE\n h_LE auth_quad_seq) alg key 0)))))))))))))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks (alg:algorithm) 
(offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit ->\n Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\\n movbe_enabled /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0)\n (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64\n (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n 
(va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n 
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)))) /\\ (forall (va_x_mem:vale_heap)\n (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)\n (va_x_rsi:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)\n (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)\n (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)\n (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)\n (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)\n (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)\n (va_x_heap3:vale_heap) (va_x_heap5:vale_heap) (va_x_heap6:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 6\n va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 3 va_x_heap3\n (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15\n (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11\n va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8\n (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4\n (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0\n (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13\n (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10\n (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64\n rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx\n va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem\n va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\\ (let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ 
Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = va_if\n (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) (fun _ ->\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM)\n inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) in let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128)\n `op_Multiply` 128 `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 +\n len128) in Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let\n (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64\n rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits\n auth_input_bytes in let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32\n padded_auth_bytes in let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n (FStar.Seq.Base.append #quad32 auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0)\n in128x6_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat)\n = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) 
`op_Multiply` 128\n `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) (fun\n _ -> raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n raw_quad_seq (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks : alg:algorithm -> offset:int -> auth_b:buffer128 -> abytes_b:buffer128\n -> in128x6_b:buffer128 -> out128x6_b:buffer128 -> in128_b:buffer128 -> out128_b:buffer128 ->\n inout_b:buffer128 -> iv_b:buffer128 -> scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq\n quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit ->\n Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b\n in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks alg offset) ([va_Mod_flags;\n va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2;\n va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm\n 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15;\n va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10;\n va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0\n va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b\n iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks (va_code_Gcm_blocks alg offset) va_s0 alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM 
(va_update_ok va_sM (va_update_mem va_sM\n va_s0))))))))))))))))))))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks (alg:algorithm) (offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit (va_code_Gcm_blocks alg\n offset)) =\n (va_QProc (va_code_Gcm_blocks alg offset) ([va_Mod_flags; va_Mod_mem_heaplet 6;\n va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;\n va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;\n va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64\n rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64\n rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64\n rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b) (va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b))\n#pop-options\n//--\n//-- Gcm_blocks_wrapped\n#push-options \"--z3rlimit 60\"\n\nval va_code_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks_wrapped alg offset =\n (va_Block (va_CCons (va_code_Gcm_blocks alg offset) (va_CCons (va_Block (va_CNil ())) (va_CCons\n (va_Block (va_CNil ())) (va_CNil ())))))\n\nval va_codegen_success_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks_wrapped alg offset =\n (va_pbool_and (va_codegen_success_Gcm_blocks alg offset) (va_ttrue ()))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks_wrapped (va_mods:va_mods_t) (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE)\n (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)\n (hkeys_b:buffer128) (expected_tag:(seq nat8)) : (va_quickCode unit (va_code_Gcm_blocks_wrapped\n alg offset)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s 
+\n offset + 0) (va_get_stack va_s) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 8) (va_get_stack va_s) in let\n (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 16) (va_get_stack va_s) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 24) (va_get_stack va_s) in let\n (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 32) (va_get_stack va_s) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 40) (va_get_stack va_s) in let\n (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset\n + 48) (va_get_stack va_s) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 56) (va_get_stack va_s) in let\n (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s\n + offset + 64) (va_get_stack va_s) in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 739 column 15 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b\n iv_b scratch_b key round_keys keys_b hkeys_b) (fun (va_s:va_state) _ -> va_qAssertSquash\n va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 741 column 37 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (8 `op_Multiply` va_get_reg64 rRsi va_old_s >= 0 /\\ 8 `op_Multiply` va_get_reg64 rRsi va_old_s\n <= 18446744073709551615 /\\ 8 `op_Multiply` plain_num_bytes >= 0 /\\ 8 `op_Multiply`\n plain_num_bytes <= 18446744073709551615) (fun _ -> let (va_arg55:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_old_s) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (va_arg54:Vale.Def.Types_s.quad32) = va_get_xmm 8 va_s in let\n (va_arg53:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s)) in let\n (va_arg52:Vale.Def.Types_s.quad32) = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2\n va_old_s) in let (va_arg51:Vale.AES.GCM_s.supported_iv_LE) = iv in let (va_arg50:Prims.nat) =\n va_get_reg64 rRsi va_old_s in let (va_arg49:Prims.nat) = plain_num_bytes in let\n (va_arg48:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s) inout_b in let (va_arg47:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg46:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s) out128x6_b in let (va_arg45:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let\n (va_arg44:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg43:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in\n let (va_arg42:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = 
Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_old_s) abytes_b in let (va_arg41:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) auth_b in let\n (va_arg40:(FStar.Seq.Base.seq Vale.Def.Words_s.nat32)) = key in let\n (va_arg39:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 741 column 37 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCM.gcm_blocks_dec_helper_simplified va_arg39 va_arg40 va_arg41\n va_arg42 va_arg43 va_arg44 va_arg45 va_arg46 va_arg47 va_arg48 va_arg49 va_arg50 va_arg51\n va_arg52 va_arg53 va_arg54 va_arg55) (let (auth_raw_quads:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) auth_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_old_s)\n abytes_b) in va_qAssertSquash va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 751 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun a_1906 (s_1907:(FStar.Seq.Base.seq a_1906)) (i_1908:Prims.nat) (j_1909:Prims.nat) -> let\n (j_1869:Prims.nat) = j_1909 in Prims.b2t (Prims.op_AmpAmp (Prims.op_LessThanOrEqual i_1908\n j_1869) (Prims.op_LessThanOrEqual j_1869 (FStar.Seq.Base.length #a_1906 s_1907))))\n Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0 (va_get_reg64\n rRsi va_old_s)) (fun _ -> let (auth_bytes:(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)) =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n auth_raw_quads) 0 (va_get_reg64 rRsi va_old_s) in let (va_arg38:Vale.Def.Types_s.quad32) =\n Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_old_s) in let\n (va_arg37:Vale.AES.GCM_s.supported_iv_LE) = iv in let (va_arg36:Prims.nat) = plain_num_bytes in\n let (va_arg35:(FStar.Seq.Base.seq Vale.Def.Words_s.nat8)) = expected_tag in let\n (va_arg34:(FStar.Seq.Base.seq Vale.Def.Words_s.nat8)) = auth_bytes in let\n (va_arg33:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s) inout_b in let (va_arg32:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg31:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s) out128x6_b in let (va_arg30:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let\n (va_arg29:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg28:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in\n let (va_arg27:(FStar.Seq.Base.seq Vale.Def.Words_s.nat32)) = key in let\n (va_arg26:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 752 column 37 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCM.gcm_blocks_helper_dec_simplified va_arg26 va_arg27 va_arg28\n va_arg29 va_arg30 va_arg31 va_arg32 va_arg33 va_arg34 va_arg35 va_arg36 va_arg37 va_arg38)\n (va_QEmpty (()))))))))\n\n\nval va_lemma_Gcm_blocks_wrapped : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> offset:int 
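(* va_lemma_Gcm_blocks_wrapped: top-level correctness lemma for Gcm_blocks_wrapped. Under the
   validity, disjointness, length, aes_reqs and hkeys_reqs_priv preconditions below, and assuming
   the IV buffer holds Vale.AES.GCM_s.compute_iv_BE of h_LE (the AES encryption of the zero block),
   it concludes that the produced ciphertext bytes agree with the first component of
   Vale.AES.GCM_s.gcm_decrypt_LE and that xmm8 holds Vale.AES.GCM.gcm_decrypt_LE_tag. *)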
->\n auth_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->\n in128_b:buffer128 -> out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 ->\n iv:supported_iv_LE -> scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) ->\n keys_b:buffer128 -> hkeys_b:buffer128 -> expected_tag:(seq nat8)\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks_wrapped alg offset) va_s0 /\\ va_get_ok\n va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64\n rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in sse_enabled /\\ movbe_enabled /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ 
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 
(Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\\ (let iv_BE =\n Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let h_LE =\n Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)\n in iv_BE == Vale.AES.GCM_s.compute_iv_BE h_LE iv))))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in Vale.X64.Decls.modifies_buffer128 out128_b\n (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0)\n (va_get_mem_heaplet 5 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b\n (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\\n va_get_reg64 rRsi va_s0 < pow2_32 /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0\n (va_get_mem_heaplet 2 va_s0) in let auth_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_s0) abytes_b) in let auth_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n auth_raw_quads) 0 (va_get_reg64 rRsi va_s0) in let plain_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n in128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) in let plain_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n plain_raw_quads) 0 plain_num_bytes in let cipher_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) in let cipher_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 
(Vale.Def.Types_s.le_seq_quad32_to_bytes\n cipher_raw_quads) 0 plain_num_bytes in l_and (l_and (l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.nat8 auth_bytes < pow2_32) (FStar.Seq.Base.length #Vale.Def.Types_s.nat8\n plain_bytes < pow2_32)) (cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8) #bool (Vale.AES.GCM_s.gcm_decrypt_LE alg\n (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes expected_tag)))\n (Vale.Def.Types_s.le_quad32_to_bytes (va_get_xmm 8 va_sM) == Vale.AES.GCM.gcm_decrypt_LE_tag\n alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes))) /\\\n va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet\n 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1\n va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM\n (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM\n (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM\n (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks_wrapped va_b0 va_s0 alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b expected_tag =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;\n va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14;\n va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;\n va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;\n va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx;\n va_Mod_reg64 rRax; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks_wrapped va_mods alg offset auth_b abytes_b in128x6_b out128x6_b\n in128_b out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b expected_tag in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks_wrapped alg offset) va_qc\n va_s0 (fun va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 588 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n 
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n label va_range1\n \"***** POSTCONDITION NOT MET AT line 711 column 56 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 712 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 713 column 57 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 714 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 715 column 58 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet\n 6 va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 718 column 39 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (plain_num_bytes < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 719 column 38 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRsi va_s0 < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 721 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 723 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let auth_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_s0)\n abytes_b) in label va_range1\n \"***** 
POSTCONDITION NOT MET AT line 724 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let auth_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0 (va_get_reg64 rRsi va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 725 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 726 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 727 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 728 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 737 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)\n (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32)) (cipher_bytes ==\n __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #bool\n (Vale.AES.GCM_s.gcm_decrypt_LE alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv\n plain_bytes auth_bytes expected_tag))) (Vale.Def.Types_s.le_quad32_to_bytes (va_get_xmm 8\n va_sM) == Vale.AES.GCM.gcm_decrypt_LE_tag alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE\n key) iv plain_bytes auth_bytes))))))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet 
va_wp_Gcm_blocks_wrapped (alg:algorithm) (offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (scratch_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (expected_tag:(seq nat8))\n (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n sse_enabled /\\ movbe_enabled /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2 va_s0) (va_get_reg64 rR8 va_s0) iv_b 1\n (va_get_mem_layout va_s0) Public /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6\n va_s0) in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 
(va_get_mem_heaplet 6 va_s0) out128x6_ptr out128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1\n va_s0) in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) out128_ptr out128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5\n va_s0) inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128\n (va_get_mem_heaplet 3 va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0)\n hkeys_b 8 (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg 
key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\\ (let iv_BE =\n Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let h_LE =\n Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)\n in iv_BE == Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\\ (forall (va_x_mem:vale_heap)\n (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)\n (va_x_rsi:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)\n (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)\n (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)\n (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)\n (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)\n (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)\n (va_x_heap3:vale_heap) (va_x_heap5:vale_heap) (va_x_heap6:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 6\n va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 3 va_x_heap3\n (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15\n (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11\n va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8\n (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4\n (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0\n (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13\n (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10\n (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64\n rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx\n va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem\n va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\\ (let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in Vale.X64.Decls.modifies_buffer128 out128_b\n (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ 
Vale.X64.Decls.modifies_buffer128\n iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0)\n (va_get_mem_heaplet 5 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b\n (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\\n va_get_reg64 rRsi va_s0 < pow2_32 /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0\n (va_get_mem_heaplet 2 va_s0) in let auth_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_s0) abytes_b) in let auth_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n auth_raw_quads) 0 (va_get_reg64 rRsi va_s0) in let plain_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n in128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) in let plain_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n plain_raw_quads) 0 plain_num_bytes in let cipher_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) in let cipher_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n cipher_raw_quads) 0 plain_num_bytes in l_and (l_and (l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.nat8 auth_bytes < pow2_32) (FStar.Seq.Base.length #Vale.Def.Types_s.nat8\n plain_bytes < pow2_32)) (cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8) #bool (Vale.AES.GCM_s.gcm_decrypt_LE alg\n (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes expected_tag)))\n (Vale.Def.Types_s.le_quad32_to_bytes (va_get_xmm 8 va_sM) == Vale.AES.GCM.gcm_decrypt_LE_tag\n alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes))) ==> va_k\n va_sM (())))\n\n\nval va_wpProof_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> auth_b:buffer128 ->\n abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 -> in128_b:buffer128 ->\n out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 -> iv:supported_iv_LE ->\n scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> expected_tag:(seq nat8) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_blocks_wrapped alg offset auth_b abytes_b in128x6_b\n out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b\n expected_tag va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_wrapped alg offset)\n ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3;\n va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; 
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks_wrapped alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b\n inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b expected_tag va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks_wrapped (va_code_Gcm_blocks_wrapped alg offset) va_s0\n alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key\n round_keys keys_b hkeys_b expected_tag in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM\n va_s0))))))))))))))))))))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks_wrapped (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE)\n (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)\n (hkeys_b:buffer128) (expected_tag:(seq nat8)) : (va_quickCode unit (va_code_Gcm_blocks_wrapped\n alg offset)) =\n (va_QProc (va_code_Gcm_blocks_wrapped alg offset) ([va_Mod_flags; va_Mod_mem_heaplet 6;\n va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;\n va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;\n va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; 
va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64\n rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64\n rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64\n rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks_wrapped alg offset\n auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key round_keys\n keys_b hkeys_b expected_tag) (va_wpProof_Gcm_blocks_wrapped alg offset auth_b abytes_b\n in128x6_b out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b\n expected_tag))\n#pop-options\n//--\n//-- Gcm_blocks_decrypt_stdcall\n#push-options \"--z3rlimit 1600\"\n\nPartial implementation:\nlet va_code_Gcm_blocks_decrypt_stdcall win alg =\n \n"
},
{
"role": "assistant",
"content": "(va_Block (va_CCons (va_code_CreateHeaplets ())\n (va_CCons (va_Block (va_CNil ()))\n (va_CCons (va_code_Save_registers win)\n (va_CCons (if win\n then\n va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRsi)\n (va_op_opr64_reg64 rRdx))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR8))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRcx)\n (va_op_opr64_reg64 rR9))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rR8\n )\n (va_op_reg_opr64_reg64 rRsp)\n (224 + 40 + 0))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64\n rR9)\n (va_op_reg_opr64_reg64 rRsp)\n (224 + 40 + 8))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64\n rRbp)\n (va_op_reg_opr64_reg64 rRsp)\n (224 + 40 + 88))\n (va_CNil ()))))))))\n else\n va_Block (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRbp)\n (va_op_reg_opr64_reg64 rRsp)\n (64 + 8 + 72))\n (va_CNil ())))\n (va_CCons (va_code_Gcm_blocks_wrapped alg\n (Vale.X64.Decls.total_if #int win (224 + 56) (64 + 8)))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rR15)\n (va_op_reg_opr64_reg64 rRsp)\n (Vale.X64.Decls.total_if #int win (224 + 40 + 96) (64 + 8 + 80)))\n (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 0)\n (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rR15)\n 0\n Secret)\n (va_CCons (va_code_XmmEqual (va_op_xmm_xmm 0) (va_op_xmm_xmm 8))\n (va_CCons (va_Block (va_CNil ()))\n (va_CCons (va_Block (va_CNil ()))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRcx)\n (va_op_opr64_reg64 rRax))\n (va_CCons (va_code_Restore_registers win)\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRax\n )\n (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_DestroyHeaplets ())\n (va_CNil ()))))))))))))))))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: a: Type -> wp: ID5.wp a -> st: Type0 -> f: ID5.repr a wp\n -> DM4F_layered5.repr a st (DM4F_layered5.lift_id_st_wp wp)\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nPartial implementation:\nlet lift_id_st a wp st (f: ID5.repr a wp) : repr a st (lift_id_st_wp wp) =\n \n"
},
{
"role": "assistant",
"content": "fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: d01: Pulse.Typing.st_equiv g c0 c1 -> d12: Pulse.Typing.st_equiv g c1 c2\n -> FStar.Pervasives.Native.option (Pulse.Typing.st_equiv g c0 c2)\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n 
CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 
`env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nPartial implementation:\nlet st_equiv_trans (#g: env) (#c0 #c1 #c2: comp) (d01: st_equiv g c0 c1) (d12: st_equiv g c1 c2)\n : option (st_equiv g c0 c2) =\n \n"
},
{
"role": "assistant",
"content": "match d01 with\n| ST_VPropEquiv\n _f\n _c0\n _c1\n x\n c0_pre_typing\n c0_res_typing\n c0_post_typing\n eq_res_01\n eq_pre_01\n eq_post_01 ->\n (let\n ST_VPropEquiv\n _f\n _c1\n _c2\n y\n c1_pre_typing\n c1_res_typing\n c1_post_typing\n eq_res_12\n eq_pre_12\n eq_post_12 =\n d12\n in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then\n Some\n (ST_VPropEquiv g\n c0\n c2\n x\n c0_pre_typing\n c0_res_typing\n c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12))\n else None)\n| ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: b1: FStar.Bytes.bytes -> b2: FStar.Bytes.bytes -> Prims.logical\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2017 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n(*\nA standard library for manipulation of value bytes.\n\nThis model is realized by Bytes.bytes in OCaml and by\nstruct {uintX_t size; char *bytes} (or similar) in C.\n\nThis file is essentially a specialized version of FStar.Seq,\nwith lemmas and refinements taylored for typical operations on\nbytes, and with support for machine integers and C-extractible versions\n(which Seq does not provide.)\n\n@summary Value bytes standard library\n*)\nmodule FStar.Bytes\n\nmodule S = FStar.Seq\nmodule U = FStar.UInt\nmodule U8 = FStar.UInt8\nmodule U16 = FStar.UInt16\nmodule U32 = FStar.UInt32\nmodule U64 = FStar.UInt64\nmodule Str = FStar.String\nmodule Chr = FStar.Char\n\nunfold let u8 = U8.t\nunfold let u16 = U16.t\nunfold let u32 = U32.t\n\n(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *)\nunfold type byte = u8\n\n(** Realized in C by a pair of a length field and uint8_t* in C\n Realized in OCaml by a string *)\nval bytes : t:Type0{hasEq t}\nval len : bytes -> u32\n\nunfold let length b = FStar.UInt32.v (len b)\n\n(** representation for specs that need lemmas not defined here. *)\nval reveal:\n bytes\n -> GTot (S.seq byte)\n\nval length_reveal:\n x:bytes\n -> Lemma (ensures (S.length (reveal x) = length x))\n [SMTPatOr [[SMTPat (S.length (reveal x))];\n [SMTPat (len x)]]]\n\nval hide:\n s:S.seq byte{S.length s < pow2 32}\n -> GTot bytes\n\nval hide_reveal:\n x:bytes\n -> Lemma (ensures (hide (reveal x) = x))\n [SMTPat (reveal x)]\n\nval reveal_hide:\n x:S.seq byte{S.length x < pow2 32}\n -> Lemma (ensures (reveal (hide x) == x))\n [SMTPat (hide x)]\n\ntype lbytes (l:nat) = b:bytes{length b = l}\ntype kbytes (k:nat) = b:bytes{length b < pow2 k}\n\nlet lbytes32 (l:UInt32.t) = b:bytes{len b = l}\n\nval empty_bytes : lbytes 0\nval empty_unique:\n b:bytes\n -> Lemma (length b = 0 ==> b = empty_bytes)\n [SMTPat (len b)]\n\n(** If you statically know the length, it is OK to read at arbitrary indexes *)\nval get:\n b:bytes\n -> pos:u32{U32.v pos < length b}\n -> Pure byte\n (requires True)\n (ensures (fun y -> y == S.index (reveal b) (U32.v pos)))\n\nunfold let op_String_Access = get\n\nunfold let index (b:bytes) (i:nat{i < length b}) = get b (U32.uint_to_t i)\n\nPartial implementation:\nlet equal b1 b2 =\n \n"
},
{
"role": "assistant",
"content": "length b1 = length b2 /\\\n(forall (i: u32{U32.v i < length b1}). {:pattern (b1.[ i ]); (b2.[ i ])} b1.[ i ] == b2.[ i ])"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: win: Prims.bool -> alg: Vale.AES.AES_common_s.algorithm -> Vale.X64.Decls.va_pbool\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file 
/home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq 
quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 
rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b 
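// The proof of va_lemma_Gcm_extra_bytes above follows the shape used for
// every procedure in this file: fix the frame va_mods (exactly the state
// components the procedure may modify, ending with va_Mod_ok and va_Mod_mem),
// instantiate the quick-code with that frame, discharge the labelled
// postconditions through va_wp_sound_code_norm, and finally normalize the
// modifies-clause with assert_norm (va_qc.mods == va_mods) and
// va_lemma_norm_mods so the resulting state matches the va_state_eq frame in
// the ensures clause.  As a hypothetical skeleton (illustrative only, for a
// made-up procedure Foo):
//
//   let va_lemma_Foo va_b0 va_s0 ... =
//     let (va_mods:va_mods_t) = [ (* modified components *) va_Mod_ok; va_Mod_mem] in
//     let va_qc = va_qcode_Foo va_mods ... in
//     let (va_sM, va_fM, va_g) =
//       va_wp_sound_code_norm (va_code_Foo ...) va_qc va_s0
//         (fun va_s0 va_sM va_g -> (* labelled postconditions *) va_get_ok va_sM) in
//     assert_norm (va_qc.mods == va_mods);
//     va_lemma_norm_mods va_mods va_sM va_s0;
//     (va_sM, va_fM)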
inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg 
inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n (va_QProc (va_code_Gcm_extra_bytes alg) ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE) (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE))\n//--\n//-- Gcm_blocks128\n\nval va_code_Gcm_blocks128 : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks128 alg =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_CCons\n (va_code_Ghash_buffer ()) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRbx)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR12)) (va_CCons (va_code_Gctr_blocks128 alg) (va_CNil ())))))))))\n\nval va_codegen_success_Gcm_blocks128 : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks128 alg =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Ghash_buffer ()) 
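// Each Vale procedure X in this module is exported through the same family
// of definitions: va_code_X (the concrete instruction tree built from
// va_Block/va_CCons), va_codegen_success_X (a static va_pbool check used
// when printing the code), va_qcode_X (the quick-code used for
// verification), va_lemma_X with va_wp_X and va_wpProof_X (the Hoare triple,
// its weakest-precondition form, and the soundness proof connecting them),
// and va_quick_X (the va_QProc bundle that callers compose).
//
// Illustrative sketch only: a caller composes va_quick_Gcm_extra_bytes
// inside its own quick-code block just as this module does further below,
// roughly
//
//   va_QBind va_range1 "label"
//     (va_quick_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b
//        total_bytes y_0 auth_in h_LE)
//     (fun (va_s:va_state) _ -> (* continue with the updated state *) va_QEmpty (()))
//
// so va_wp_Gcm_extra_bytes becomes an obligation at the call site and its
// postcondition is available on va_s in the continuation.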
(va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_pbool_and\n (va_codegen_success_Gctr_blocks128 alg) (va_ttrue ()))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks128 (va_mods:va_mods_t) (alg:algorithm) (in_b:buffer128) (out_b:buffer128)\n (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 274 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 275 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 276 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 277 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_buffer hkeys_b in_b h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_old_s))) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 278 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 279 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 280 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_QEmpty (()))))))))))\n\n\nval va_lemma_Gcm_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->\n out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks128 alg) va_s0 /\\ va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < 
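// Gcm_blocks128 (defined just above) handles the whole 128-bit blocks that
// were not consumed by the 6-block-stride decryptor: it saves rRdi/rRdx in
// rRbx/rR12, moves the in_b pointer from rRax into rRdi, runs Ghash_buffer
// over in_b to extend the hash in xmm8, restores rRdi/rRdx, and then runs
// Gctr_blocks128 to apply the CTR keystream to the same blocks, writing
// out_b.  The requires clause being stated here mirrors the callee
// preconditions: SSE and PCLMULQDQ support, disjointness of keys_b/hkeys_b
// from out_b, validity of in_b/out_b for rRdx 128-bit blocks (in-place only
// when in_b == out_b), the byte-swap constant expected in xmm9, and the
// aes_reqs / hkeys_reqs_priv key-schedule and hash-key assumptions.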
pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) /\\ va_state_eq va_sM (va_update_flags va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 10 va_sM (va_update_xmm 11 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR11 va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks128 va_b0 va_s0 alg in_b out_b key round_keys keys_b hkeys_b h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks128 va_mods alg in_b out_b key round_keys keys_b hkeys_b h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks128 alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 210 
column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (label va_range1\n \"***** POSTCONDITION NOT MET AT line 255 column 53 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 261 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 262 column 45 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx\n va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 265 column 93 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) out_b)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 267 column 131 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 > 0 ==> l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n FStar.Seq.Base.length #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) (va_s0:va_state)\n (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax 
va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret) /\\ (forall (va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_rdi:nat64) (va_x_r11:nat64)\n (va_x_r10:nat64) (va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32)\n (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl\n (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11\n (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5\n (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1\n (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64\n rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbx\n va_x_rbx (va_upd_mem va_x_mem va_s0))))))))))))))))))) in va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq\n nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32 ->\n va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires 
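// Reading the Gcm_blocks128 contract stated above: on exit only out_b
// (heaplet 1) is modified; gctr_partial alg rRdx relates the old in_b
// contents to the new out_b contents under key and the starting counter in
// xmm11; xmm11 is advanced by inc32lite over rRdx blocks; when rRdx > 0 the
// byte-reversed xmm8 equals ghash_incremental of the previous hash over the
// input blocks, and when rRdx == 0 both xmm8 and out_b are unchanged.
// va_wp_Gcm_blocks128 restates this as a weakest precondition: the caller
// proves the requires conjuncts and shows that its continuation va_k accepts
// every state obtained by updating the universally quantified frame
// components with values satisfying the ensures.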
(va_t_require va_s0 /\\ va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b\n h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks128 alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks128 (va_code_Gcm_blocks128 alg) va_s0 alg in_b out_b key\n round_keys keys_b hkeys_b h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6\n va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2\n va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR12 va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM (va_update_ok va_sM (va_update_mem\n va_sM va_s0))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) : (va_quickCode\n unit (va_code_Gcm_blocks128 alg)) =\n (va_QProc (va_code_Gcm_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;\n va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64\n rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem])\n (va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE)\n (va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE))\n//--\n//-- Gcm_blocks\n#push-options \"--z3rlimit 1000\"\n\nval va_code_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks alg offset =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64\n 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 0)) (va_CCons (va_code_Gcm_blocks_auth ()) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 24)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13))\n 
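// va_code_Gcm_blocks emits the whole decryption skeleton: stash rRcx in
// rR13, advance rR9 by 32 (callee preconditions locate hkeys_b at rR9 - 32,
// so this re-bases the hash-key pointer), load the stack arguments,
// authenticate the AAD with Gcm_blocks_auth, load the IV through rR8 and
// save j0 to scratch_b, bump the counter with Load_one_lsb/VPaddd, run
// AES_GCM_decrypt_6mult over the 6-block-stride region, then Gcm_blocks128
// over the remaining whole blocks, optionally Gcm_extra_bytes for a trailing
// partial block, and finally Gcm_make_length_quad, Ghash_register and
// Gctr_register to fold the length block into the hash and encrypt it under
// j0, leaving the tag value in xmm8.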
(va_CCons (va_code_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public) (va_CCons (va_code_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_CCons (va_code_Load_one_lsb\n (va_op_xmm_xmm 10)) (va_CCons (va_code_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1)\n (va_op_xmm_xmm 10)) (va_CCons (va_code_AES_GCM_decrypt_6mult alg) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64\n rRbp) 32 Secret) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 32)) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdi)\n (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_CCons\n (va_code_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_CCons (va_code_Gcm_blocks128 alg)\n (va_CCons (va_code_Stack_lemma ()) (va_CCons (va_code_Add64 (va_op_dst_opr64_reg64 rR14)\n (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public)) (va_CCons (va_code_IMul64\n (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_CCons (va_IfElse\n (va_cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (va_Block (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (va_CCons\n (va_code_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_CCons\n (va_code_Gcm_extra_bytes alg) (va_CCons (va_Block (va_CNil ())) (va_CNil ()))))))) (va_Block\n (va_CNil ()))) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15))\n (va_CCons (va_code_Gcm_make_length_quad ()) (va_CCons (va_code_Ghash_register ()) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret) (va_CCons (va_code_Gctr_register alg) (va_CCons (va_Block (va_CNil ()))\n (va_CNil ()))))))))))))))))))))))))))))))))))))\n\nval va_codegen_success_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks alg offset =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9)\n (va_const_opr64 32)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64\n rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0)) (va_pbool_and\n (va_codegen_success_Gcm_blocks_auth ()) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp)\n (offset + 16)) (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx)\n (va_op_reg_opr64_reg64 rRsp) (offset + 24)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) 
(va_pbool_and (va_codegen_success_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64 rR8) 0 Public)\n (va_pbool_and (va_codegen_success_Store128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_reg_opr64_reg64 rRbp) (va_op_xmm_xmm 1) 0 Secret) (va_pbool_and\n (va_codegen_success_Load_one_lsb (va_op_xmm_xmm 10)) (va_pbool_and (va_codegen_success_VPaddd\n (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_pbool_and\n (va_codegen_success_AES_GCM_decrypt_6mult alg) (va_pbool_and (va_codegen_success_Load128_buffer\n (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11) (va_op_reg_opr64_reg64 rRbp) 32 Secret)\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax)\n (va_op_reg_opr64_reg64 rRsp) (offset + 32)) (va_pbool_and (va_codegen_success_Load64_stack\n (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40)) (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp)\n (offset + 48)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR14)\n (va_op_opr64_reg64 rRdx)) (va_pbool_and (va_codegen_success_InitPshufbMask (va_op_xmm_xmm 9)\n (va_op_reg_opr64_reg64 rR12)) (va_pbool_and (va_codegen_success_Pshufb (va_op_xmm_xmm 11)\n (va_op_xmm_xmm 9)) (va_pbool_and (va_codegen_success_Gcm_blocks128 alg) (va_pbool_and\n (va_codegen_success_Stack_lemma ()) (va_pbool_and (va_codegen_success_Add64\n (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp) (offset + 24) Public))\n (va_pbool_and (va_codegen_success_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rR13)\n (va_op_reg_opr64_reg64 rRsp) (offset + 64)) (va_pbool_and (va_pbool_and\n (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp)\n (offset + 56)) (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR10)\n (va_op_opr64_reg64 rR13)) (va_pbool_and (va_codegen_success_And64 (va_op_dst_opr64_reg64 rR10)\n (va_const_opr64 15)) (va_codegen_success_Gcm_extra_bytes alg)))) (va_pbool_and\n (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_pbool_and\n (va_codegen_success_Gcm_make_length_quad ()) (va_pbool_and (va_codegen_success_Ghash_register\n ()) (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 3)\n (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64 rRbp) 0 Secret) (va_pbool_and\n (va_codegen_success_Gctr_register alg) (va_ttrue ()))))))))))))))))))))))))))))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks (va_mods:va_mods_t) (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit\n (va_code_Gcm_blocks alg offset)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 0) (va_get_stack va_s) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 8) (va_get_stack va_s) in let\n 
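// Stack-argument layout read by Gcm_blocks (64-bit Public slots at
// rRsp + offset + k):
//   +0   abytes_ptr        trailing (partial) AAD block, abytes_b
//   +8   in128x6_ptr       input of the 6-block-stride region, in128x6_b
//   +16  out128x6_ptr      output of the 6-block-stride region, out128x6_b
//   +24  len128x6          number of 128-bit blocks in that region
//   +32  in128_ptr         input of the remaining whole blocks, in128_b
//   +40  out128_ptr        output of the remaining whole blocks, out128_b
//   +48  len128            number of remaining whole 128-bit blocks
//   +56  inout_ptr         single in-place block for the partial tail, inout_b
//   +64  plain_num_bytes   total plaintext length in bytes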
(out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 16) (va_get_stack va_s) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 24) (va_get_stack va_s) in let\n (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 32) (va_get_stack va_s) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 40) (va_get_stack va_s) in let\n (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset\n + 48) (va_get_stack va_s) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 56) (va_get_stack va_s) in let\n (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s\n + offset + 64) (va_get_stack va_s) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_old_s)) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 463 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 464 column 13 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AddLea64 (va_op_dst_opr64_reg64 rR9) (va_op_opr64_reg64 rR9) (va_const_opr64 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 465 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRbx) (va_op_reg_opr64_reg64 rRsp) (offset + 0))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 466 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE) (fun (va_s:va_state)\n (auth_quad_seq:(seq quad32)) -> let (y_0:quad32) = Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0 in let (y_auth_bytes:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 473 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 8))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 474 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRsi) (va_op_reg_opr64_reg64 rRsp) (offset + 16))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 475 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 24))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 476 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR13)) (va_QBind 
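// The steps that follow set up the counter and run the wide decryptor: the
// IV is loaded from iv_b (heaplet 2, Public) into xmm1 and stored to
// scratch_b slot 0 as j0; Load_one_lsb/VPaddd then bump xmm1 to the first
// data counter, ctr_BE_2 = inc32 iv_BE 1; AES_GCM_decrypt_6mult decrypts the
// len128x6-block region from in128x6_b into out128x6_b while folding the
// ciphertext into the GHASH state; and lemma_ghash_incremental0_append lets
// that ciphertext be appended to the running auth_in sequence of hashed
// quadwords.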
va_range1\n \"***** PRECONDITION NOT MET AT line 477 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> let\n (iv_BE:Vale.X64.Decls.quad32) = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2\n va_old_s) in let (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32\n iv_BE 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 483 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 2) (va_op_xmm_xmm 1) (va_op_reg_opr64_reg64\n rR8) 0 Public iv_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 485 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_reg_opr64_reg64 rRbp)\n (va_op_xmm_xmm 1) 0 Secret scratch_b 0) (fun (va_s:va_state) _ -> let (j0:quad32) = va_get_xmm\n 1 va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 487 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load_one_lsb (va_op_xmm_xmm 10)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 489 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_VPaddd (va_op_xmm_xmm 1) (va_op_xmm_xmm 1) (va_op_xmm_xmm 10)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 491 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AES_GCM_decrypt_6mult alg h_LE iv_b in128x6_b out128x6_b scratch_b key round_keys\n keys_b hkeys_b) (fun (va_s:va_state) _ -> let (y_cipher128x6:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (auth_in:(seq quad32)) =\n auth_quad_seq in let (va_arg138:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in let\n (va_arg137:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg136:Vale.Def.Types_s.quad32) = y_auth_bytes in let (va_arg135:Vale.Def.Types_s.quad32) =\n y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 494 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg135 va_arg136\n y_cipher128x6 va_arg137 va_arg138) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 498 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 11)\n (va_op_reg_opr64_reg64 rRbp) 32 Secret scratch_b 2) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 499 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR8) (va_op_opr64_reg64 rRcx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 500 
column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 32))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 501 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdi) (va_op_reg_opr64_reg64 rRsp) (offset + 40))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 502 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRdx) (va_op_reg_opr64_reg64 rRsp) (offset + 48))\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 503 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR14) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 504 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_InitPshufbMask (va_op_xmm_xmm 9) (va_op_reg_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 505 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 11) (va_op_xmm_xmm 9)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 506 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks128 alg in128_b out128_b key round_keys keys_b hkeys_b h_LE) (fun\n (va_s:va_state) _ -> let (y_cipher128:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s) in let (va_arg134:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b in let\n (va_arg133:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg132:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 508 column 36 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_ghash_incremental0_append h_LE va_arg132 y_cipher128x6\n y_cipher128 va_arg133 va_arg134) (let auth_in = FStar.Seq.Base.append #quad32 auth_in\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Stack_lemma (va_op_reg64_reg64 rRsp) (offset + 24) Public) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 512 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Add64 (va_op_dst_opr64_reg64 rR14) (va_opr_code_Stack (va_op_reg64_reg64 rRsp)\n (offset + 24) Public)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 513 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_IMul64 (va_op_dst_opr64_reg64 rR14) (va_const_opr64 16)) (va_QBind va_range1\n \"***** PRECONDITION 
NOT MET AT line 514 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rR13) (va_op_reg_opr64_reg64 rRsp) (offset + 64))\n (fun (va_s:va_state) _ -> let (y_inout:Vale.Def.Types_s.quad32) = y_cipher128 in let\n (plain_byte_seq:(seq quad32)) = empty_seq_quad32 in let (cipher_byte_seq:(seq quad32)) =\n empty_seq_quad32 in let (va_arg131:Vale.Def.Types_s.quad32) = va_get_xmm 11 va_s in let\n (va_arg130:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg129:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg128:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg127:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 519 column 29 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_opaque_init va_arg127 va_arg128 va_arg129 va_arg130\n va_arg131) (let (total_bytes:(va_int_at_least 0)) = FStar.Seq.Base.length #quad32 auth_quad_seq\n `op_Multiply` 16 + plain_num_bytes in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 523 column 8 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_qIf va_mods (Cmp_gt (va_op_cmp_reg64 rR13) (va_op_cmp_reg64 rR14)) (qblock va_mods (fun\n (va_s:va_state) -> va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 525 column 21 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load64_stack (va_op_dst_opr64_reg64 rRax) (va_op_reg_opr64_reg64 rRsp) (offset + 56))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 526 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR10) (va_op_opr64_reg64 rR13)) (fun (va_s:va_state) _\n -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 527 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Poly1305.Math.lemma_poly_bits64 ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 528 column 14 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_And64 (va_op_dst_opr64_reg64 rR10) (va_const_opr64 15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 532 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes y_0 auth_in\n h_LE) (fun (va_s:va_state) _ -> let y_inout = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm\n 8 va_s) in let (raw_auth_quads:(FStar.Seq.Base.seq quad32)) = FStar.Seq.Base.append #quad32\n auth_in (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b) in va_qAssertSquash\n va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 536 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun a_1906 (s_1907:(FStar.Seq.Base.seq a_1906)) (i_1908:Prims.nat) (j_1909:Prims.nat) -> let\n (j_1869:Prims.nat) = j_1909 in Prims.b2t (Prims.op_AmpAmp 
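// The va_qIf above is the trailing-partial-block case: rR14 holds
// 16 * (len128 + len128x6), the number of bytes covered by whole blocks,
// and rR13 holds plain_num_bytes.  When rR13 > rR14, the single block in
// inout_b contains the partial tail, so the branch loads inout_ptr into
// rRax, computes rR10 = plain_num_bytes & 15 (the lemma_poly_bits64 call
// supplies the bitwise facts relating the mask to a modulus), and calls
// Gcm_extra_bytes, which both extends the GHASH over the padded tail bytes
// and applies the CTR keystream to inout_b in place.  The va_qAssertSquash
// being stated here only establishes that slicing le_seq_quad32_to_bytes
// raw_auth_quads at total_bytes is within bounds.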
(Prims.op_LessThanOrEqual i_1908\n j_1869) (Prims.op_LessThanOrEqual j_1869 (FStar.Seq.Base.length #a_1906 s_1907))))\n Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 total_bytes)\n (fun _ -> let (auth_input_bytes:(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)) =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n raw_auth_quads) 0 total_bytes in let (padded_auth_bytes:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let auth_in =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let plain_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let cipher_byte_seq =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QEmpty ((auth_in,\n cipher_byte_seq, plain_byte_seq, y_inout)))))))))) (qblock va_mods (fun (va_s:va_state) ->\n va_QEmpty ((auth_in, cipher_byte_seq, plain_byte_seq, y_inout))))) (fun (va_s:va_state) va_g ->\n let ((auth_in:(seq quad32)), (cipher_byte_seq:(seq quad32)), (plain_byte_seq:(seq quad32)),\n (y_inout:Vale.Def.Types_s.quad32)) = va_g in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 547 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR11) (va_op_opr64_reg64 rR15)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 548 column 25 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_make_length_quad ()) (fun (va_s:va_state) _ -> let\n (length_quad32:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 0\n va_s) in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 551 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_register hkeys_b h_LE y_inout) (fun (va_s:va_state) _ -> let\n (y_final:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s)\n in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 554 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 3) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRbp) 0 Secret scratch_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 557 column 18 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_register alg key round_keys keys_b) (fun (va_s:va_state) _ -> let\n (va_arg126:Vale.Def.Types_s.quad32) = va_get_xmm 8 va_s in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 560 column 40 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.Arch.Types.le_seq_quad32_to_bytes_of_singleton va_arg126)\n (va_qAssertSquash va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun (icb_BE_677:Vale.Def.Types_s.quad32) (plain_LE_678:Vale.Def.Types_s.quad32)\n (alg_679:Vale.AES.AES_common_s.algorithm) (key_680:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32))\n (i_681:Prims.int) -> Vale.AES.AES_s.is_aes_key_LE alg_679 key_680) j0 y_final alg key 0) (fun 
_\n -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 561 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_s == Vale.AES.GCTR_s.gctr_encrypt_block j0 y_final alg key 0) (let\n (plain128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let\n (cipher128:(FStar.Seq.Base.seq Vale.X64.Decls.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) in128_b) in va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 566 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 plain_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 plain128 plain_byte_seq)\n plain128) (va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 567 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.length #quad32 cipher_byte_seq == 0 ==> FStar.Seq.Base.equal\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 cipher128 cipher_byte_seq)\n cipher128) (let (va_arg125:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite ctr_BE_2 len128x6\n in let (va_arg124:Vale.Def.Types_s.quad32) = ctr_BE_2 in let (va_arg123:(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat32)) = key in let (va_arg122:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32))\n = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg121:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg120:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b in let\n (va_arg119:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_old_s) in128x6_b in let (va_arg118:Prims.nat) = len128 in let\n (va_arg117:Prims.nat) = len128x6 in let (va_arg116:Vale.AES.AES_common_s.algorithm) = alg in\n va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 569 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg116 va_arg117 va_arg118\n va_arg119 va_arg120 va_arg121 va_arg122 va_arg123 va_arg124 va_arg125) (let\n (va_arg115:Vale.Def.Types_s.quad32) = Vale.AES.GCTR.inc32lite (Vale.AES.GCTR.inc32lite ctr_BE_2\n len128x6) len128 in let (va_arg114:Vale.Def.Types_s.quad32) = ctr_BE_2 in let\n (va_arg113:(FStar.Seq.Base.seq Vale.Def.Types_s.nat32)) = key in let\n (va_arg112:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = cipher_byte_seq in let\n (va_arg111:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = plain_byte_seq in let\n (va_arg110:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b) in let\n (va_arg109:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) 
in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) in128_b) in let (va_arg108:Prims.nat) =\n FStar.Seq.Base.length #quad32 plain_byte_seq in let (va_arg107:Prims.nat) = len128x6 + len128\n in let (va_arg106:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 575 column 30 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.lemma_gctr_partial_append va_arg106 va_arg107 va_arg108\n va_arg109 va_arg110 va_arg111 va_arg112 va_arg113 va_arg114 va_arg115) (let\n (va_arg105:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = auth_in in let\n (va_arg104:Vale.Def.Types_s.quad32) = y_0 in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 583 column 23 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.lemma_hash_append2 h_LE va_arg104 y_inout y_final va_arg105\n length_quad32) (let auth_in = FStar.Seq.Base.append #quad32 auth_in (FStar.Seq.Base.create\n #Vale.Def.Types_s.quad32 1 length_quad32) in let (va_arg103:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = auth_in in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 585 column 31 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GHash.ghash_incremental_to_ghash h_LE va_arg103) (va_QEmpty\n (()))))))))))))))))))))))))))))))))))))))))))))))))\n\n\nval va_lemma_Gcm_blocks : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> offset:int ->\n auth_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->\n in128_b:buffer128 -> out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 ->\n scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks alg offset) va_s0 /\\ va_get_ok va_s0 /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\ movbe_enabled /\\\n 
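// The precondition of Gcm_blocks continues below: SSE and MOVBE must be
// enabled; each of the nine stack slots used for the arguments must be a
// valid Public-tainted 64-bit slot; the AAD, IV, scratch, key, hash-key and
// data buffers must be valid in their respective heaplets and disjoint as
// listed; in-place operation is permitted only when an input buffer and its
// output buffer coincide exactly; and the ptr + 16 * len < pow2_64 bounds
// rule out address wrap-around in the pointer arithmetic.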
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n 
/\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))))))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) 
(va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) else FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = (if (plain_num_bytes\n > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) else\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM)\n out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) in let\n (cipher_bound:nat) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division`\n 8) then (len128x6 + len128 + 1) else (len128x6 + len128)) in Vale.AES.GCTR.gctr_partial alg\n cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let (length_quad:quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let\n (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in let\n (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes in let\n (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat) =\n FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = (if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) then (let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) else\n raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) /\\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM\n va_s0)))))))))))))))))))))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks va_b0 va_s0 alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;\n va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14;\n va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;\n va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;\n va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx;\n va_Mod_reg64 rRax; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks va_mods alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks alg offset) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 283 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 
(va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 396 column 56 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 397 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 398 column 57 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 399 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 400 column 58 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet\n 6 va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 403 column 39 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (plain_num_bytes < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 404 column 38 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRsi va_s0 < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 406 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 408 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (ctr_BE_1:quad32) = iv_BE in label va_range1\n \"***** POSTCONDITION NOT MET AT line 409 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let 
(ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 412 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 421 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_out:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 430 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 + len128) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 434 column 77 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 438 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply`\n plain_num_bytes) 0) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 440 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 444 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n 
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 445 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 446 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes\n in label va_range1\n \"***** POSTCONDITION NOT MET AT line 448 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 (FStar.Seq.Base.append #quad32\n auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 452 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (total_bytes:nat) = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 +\n plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 453 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply`\n 128 `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32\n raw_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let\n (pb:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32\n pb) (fun _ -> raw_quad_seq) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 460 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32 raw_quad_seq\n (FStar.Seq.Base.create #quad32 1 length_quad) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 461 column 106 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 8 va_sM == Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE\n h_LE auth_quad_seq) alg key 0)))))))))))))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks (alg:algorithm) 
(offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit ->\n Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n let (h_LE:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s0)) in sse_enabled /\\\n movbe_enabled /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0)\n (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64\n (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n 
(va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n 
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)))) /\\ (forall (va_x_mem:vale_heap)\n (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)\n (va_x_rsi:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)\n (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)\n (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)\n (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)\n (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)\n (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)\n (va_x_heap3:vale_heap) (va_x_heap5:vale_heap) (va_x_heap6:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 6\n va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 3 va_x_heap3\n (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15\n (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11\n va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8\n (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4\n (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0\n (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13\n (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10\n (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64\n rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx\n va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem\n va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\\ (let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2\n (va_get_mem_heaplet 0 va_s0)) in Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet\n 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ 
Vale.X64.Decls.modifies_buffer128 iv_b\n (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0)\n (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32\n /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let\n (ctr_BE_1:quad32) = iv_BE in let (ctr_BE_2:quad32) = Vale.AES.GCTR_s.inc32 iv_BE 1 in let\n (plain_in:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128\n `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in128_b)) in let (cipher_out:(seq quad32)) = va_if\n (plain_num_bytes > (len128x6 + len128) `op_Multiply` 128 `op_Division` 8) (fun _ ->\n FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM)\n inout_b)) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) in let (cipher_bound:nat) = va_if (plain_num_bytes > (len128x6 + len128)\n `op_Multiply` 128 `op_Division` 8) (fun _ -> len128x6 + len128 + 1) (fun _ -> len128x6 +\n len128) in Vale.AES.GCTR.gctr_partial alg cipher_bound plain_in cipher_out key ctr_BE_2 /\\ (let\n (length_quad:quad32) = Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_s0) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0\n `op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1\n va_s0) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice\n #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64\n rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits\n auth_input_bytes in let (auth_quad_seq:(seq quad32)) = Vale.Def.Types_s.le_bytes_to_seq_quad32\n padded_auth_bytes in let (raw_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n (FStar.Seq.Base.append #quad32 auth_quad_seq (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0)\n in128x6_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b) in let (total_bytes:nat)\n = FStar.Seq.Base.length #quad32 auth_quad_seq `op_Multiply` 16 + plain_num_bytes in let\n (raw_quad_seq:(seq quad32)) = va_if (plain_num_bytes > (len128x6 + len128) 
`op_Multiply` 128\n `op_Division` 8) (fun _ -> let (ab:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.append #quad32 raw_quad_seq\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b))) 0 total_bytes in let (pb:(seq\n nat8)) = Vale.AES.GCTR_s.pad_to_128_bits ab in Vale.Def.Types_s.le_bytes_to_seq_quad32 pb) (fun\n _ -> raw_quad_seq) in let (auth_quad_seq:(seq quad32)) = FStar.Seq.Base.append #quad32\n raw_quad_seq (FStar.Seq.Base.create #quad32 1 length_quad) in va_get_xmm 8 va_sM ==\n Vale.AES.GCTR_s.gctr_encrypt_block ctr_BE_1 (Vale.AES.GHash_s.ghash_LE h_LE auth_quad_seq) alg\n key 0))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks : alg:algorithm -> offset:int -> auth_b:buffer128 -> abytes_b:buffer128\n -> in128x6_b:buffer128 -> out128x6_b:buffer128 -> in128_b:buffer128 -> out128_b:buffer128 ->\n inout_b:buffer128 -> iv_b:buffer128 -> scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq\n quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit ->\n Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b\n in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks alg offset) ([va_Mod_flags;\n va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2;\n va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm\n 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15;\n va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10;\n va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0\n va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b\n iv_b scratch_b key round_keys keys_b hkeys_b va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks (va_code_Gcm_blocks alg offset) va_s0 alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM 
(va_update_ok va_sM (va_update_mem va_sM\n va_s0))))))))))))))))))))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks (alg:algorithm) (offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit (va_code_Gcm_blocks alg\n offset)) =\n (va_QProc (va_code_Gcm_blocks alg offset) ([va_Mod_flags; va_Mod_mem_heaplet 6;\n va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;\n va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;\n va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64\n rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64\n rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64\n rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks alg offset auth_b\n abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b scratch_b key round_keys keys_b\n hkeys_b) (va_wpProof_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b scratch_b key round_keys keys_b hkeys_b))\n#pop-options\n//--\n//-- Gcm_blocks_wrapped\n#push-options \"--z3rlimit 60\"\n\nval va_code_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks_wrapped alg offset =\n (va_Block (va_CCons (va_code_Gcm_blocks alg offset) (va_CCons (va_Block (va_CNil ())) (va_CCons\n (va_Block (va_CNil ())) (va_CNil ())))))\n\nval va_codegen_success_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks_wrapped alg offset =\n (va_pbool_and (va_codegen_success_Gcm_blocks alg offset) (va_ttrue ()))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks_wrapped (va_mods:va_mods_t) (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE)\n (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)\n (hkeys_b:buffer128) (expected_tag:(seq nat8)) : (va_quickCode unit (va_code_Gcm_blocks_wrapped\n alg offset)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s 
+\n offset + 0) (va_get_stack va_s) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 8) (va_get_stack va_s) in let\n (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 16) (va_get_stack va_s) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 24) (va_get_stack va_s) in let\n (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s +\n offset + 32) (va_get_stack va_s) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 40) (va_get_stack va_s) in let\n (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset\n + 48) (va_get_stack va_s) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s + offset + 56) (va_get_stack va_s) in let\n (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s\n + offset + 64) (va_get_stack va_s) in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 739 column 15 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gcm_blocks alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b\n iv_b scratch_b key round_keys keys_b hkeys_b) (fun (va_s:va_state) _ -> va_qAssertSquash\n va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 741 column 37 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (8 `op_Multiply` va_get_reg64 rRsi va_old_s >= 0 /\\ 8 `op_Multiply` va_get_reg64 rRsi va_old_s\n <= 18446744073709551615 /\\ 8 `op_Multiply` plain_num_bytes >= 0 /\\ 8 `op_Multiply`\n plain_num_bytes <= 18446744073709551615) (fun _ -> let (va_arg55:Vale.Def.Types_s.quad32) =\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.Def.Types_s.insert_nat64\n (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8\n `op_Multiply` va_get_reg64 rRsi va_old_s) 1) (8 `op_Multiply` plain_num_bytes) 0) in let\n (va_arg54:Vale.Def.Types_s.quad32) = va_get_xmm 8 va_s in let\n (va_arg53:Vale.Def.Types_s.quad32) = Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem_heaplet 0 va_s)) in let\n (va_arg52:Vale.Def.Types_s.quad32) = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2\n va_old_s) in let (va_arg51:Vale.AES.GCM_s.supported_iv_LE) = iv in let (va_arg50:Prims.nat) =\n va_get_reg64 rRsi va_old_s in let (va_arg49:Prims.nat) = plain_num_bytes in let\n (va_arg48:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s) inout_b in let (va_arg47:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg46:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s) out128x6_b in let (va_arg45:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let\n (va_arg44:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg43:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in\n let (va_arg42:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = 
Vale.X64.Decls.s128\n (va_get_mem_heaplet 7 va_old_s) abytes_b in let (va_arg41:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_old_s) auth_b in let\n (va_arg40:(FStar.Seq.Base.seq Vale.Def.Words_s.nat32)) = key in let\n (va_arg39:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 741 column 37 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCM.gcm_blocks_dec_helper_simplified va_arg39 va_arg40 va_arg41\n va_arg42 va_arg43 va_arg44 va_arg45 va_arg46 va_arg47 va_arg48 va_arg49 va_arg50 va_arg51\n va_arg52 va_arg53 va_arg54 va_arg55) (let (auth_raw_quads:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) auth_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_old_s)\n abytes_b) in va_qAssertSquash va_range1\n \"***** EXPRESSION PRECONDITIONS NOT MET WITHIN line 751 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n ((fun a_1906 (s_1907:(FStar.Seq.Base.seq a_1906)) (i_1908:Prims.nat) (j_1909:Prims.nat) -> let\n (j_1869:Prims.nat) = j_1909 in Prims.b2t (Prims.op_AmpAmp (Prims.op_LessThanOrEqual i_1908\n j_1869) (Prims.op_LessThanOrEqual j_1869 (FStar.Seq.Base.length #a_1906 s_1907))))\n Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0 (va_get_reg64\n rRsi va_old_s)) (fun _ -> let (auth_bytes:(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)) =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n auth_raw_quads) 0 (va_get_reg64 rRsi va_old_s) in let (va_arg38:Vale.Def.Types_s.quad32) =\n Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_old_s) in let\n (va_arg37:Vale.AES.GCM_s.supported_iv_LE) = iv in let (va_arg36:Prims.nat) = plain_num_bytes in\n let (va_arg35:(FStar.Seq.Base.seq Vale.Def.Words_s.nat8)) = expected_tag in let\n (va_arg34:(FStar.Seq.Base.seq Vale.Def.Words_s.nat8)) = auth_bytes in let\n (va_arg33:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s) inout_b in let (va_arg32:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s) out128_b in let\n (va_arg31:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s) out128x6_b in let (va_arg30:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_old_s) inout_b in let\n (va_arg29:(FStar.Seq.Base.seq Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_old_s) in128_b in let (va_arg28:(FStar.Seq.Base.seq\n Vale.Def.Types_s.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_old_s) in128x6_b in\n let (va_arg27:(FStar.Seq.Base.seq Vale.Def.Words_s.nat32)) = key in let\n (va_arg26:Vale.AES.AES_common_s.algorithm) = alg in va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 752 column 37 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCM.gcm_blocks_helper_dec_simplified va_arg26 va_arg27 va_arg28\n va_arg29 va_arg30 va_arg31 va_arg32 va_arg33 va_arg34 va_arg35 va_arg36 va_arg37 va_arg38)\n (va_QEmpty (()))))))))\n\n\nval va_lemma_Gcm_blocks_wrapped : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> offset:int 
->\n auth_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->\n in128_b:buffer128 -> out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 ->\n iv:supported_iv_LE -> scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) ->\n keys_b:buffer128 -> hkeys_b:buffer128 -> expected_tag:(seq nat8)\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks_wrapped alg offset) va_s0 /\\ va_get_ok\n va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64\n rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in sse_enabled /\\ movbe_enabled /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet\n 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2\n va_s0) (va_get_reg64 rR8 va_s0) iv_b 1 (va_get_mem_layout va_s0) Public /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6 va_s0) in128x6_ptr in128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ 
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 6\n va_s0) out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) in128_ptr in128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1\n va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) inout_ptr inout_b 1\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 3\n va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0) hkeys_b 8\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 
(Vale.AES.AES_s.aes_encrypt_LE alg key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\\ (let iv_BE =\n Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let h_LE =\n Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)\n in iv_BE == Vale.AES.GCM_s.compute_iv_BE h_LE iv))))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in Vale.X64.Decls.modifies_buffer128 out128_b\n (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ Vale.X64.Decls.modifies_buffer128\n iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0)\n (va_get_mem_heaplet 5 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b\n (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\\n va_get_reg64 rRsi va_s0 < pow2_32 /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0\n (va_get_mem_heaplet 2 va_s0) in let auth_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_s0) abytes_b) in let auth_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n auth_raw_quads) 0 (va_get_reg64 rRsi va_s0) in let plain_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n in128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) in let plain_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n plain_raw_quads) 0 plain_num_bytes in let cipher_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) in let cipher_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 
(Vale.Def.Types_s.le_seq_quad32_to_bytes\n cipher_raw_quads) 0 plain_num_bytes in l_and (l_and (l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.nat8 auth_bytes < pow2_32) (FStar.Seq.Base.length #Vale.Def.Types_s.nat8\n plain_bytes < pow2_32)) (cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8) #bool (Vale.AES.GCM_s.gcm_decrypt_LE alg\n (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes expected_tag)))\n (Vale.Def.Types_s.le_quad32_to_bytes (va_get_xmm 8 va_sM) == Vale.AES.GCM.gcm_decrypt_LE_tag\n alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes))) /\\\n va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet\n 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1\n va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM (va_update_xmm 12\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM\n (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM\n (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM\n (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks_wrapped va_b0 va_s0 alg offset auth_b abytes_b in128x6_b out128x6_b in128_b\n out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b expected_tag =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;\n va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14;\n va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;\n va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;\n va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx;\n va_Mod_reg64 rRax; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks_wrapped va_mods alg offset auth_b abytes_b in128x6_b out128x6_b\n in128_b out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b expected_tag in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks_wrapped alg offset) va_qc\n va_s0 (fun va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 588 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n 
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n label va_range1\n \"***** POSTCONDITION NOT MET AT line 711 column 56 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 712 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 713 column 57 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 714 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 715 column 58 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out128x6_b (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet\n 6 va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 718 column 39 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (plain_num_bytes < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 719 column 38 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRsi va_s0 < pow2_32) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 721 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 723 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let auth_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) auth_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_s0)\n abytes_b) in label va_range1\n \"***** 
POSTCONDITION NOT MET AT line 724 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let auth_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0 (va_get_reg64 rRsi va_s0) in label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 725 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_s0) in128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 726 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 727 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 6 va_sM) out128x6_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out128_b)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 728 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 737 column 52 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)\n (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32)) (cipher_bytes ==\n __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #bool\n (Vale.AES.GCM_s.gcm_decrypt_LE alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv\n plain_bytes auth_bytes expected_tag))) (Vale.Def.Types_s.le_quad32_to_bytes (va_get_xmm 8\n va_sM) == Vale.AES.GCM.gcm_decrypt_LE_tag alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE\n key) iv plain_bytes auth_bytes))))))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet 
va_wp_Gcm_blocks_wrapped (alg:algorithm) (offset:int) (auth_b:buffer128) (abytes_b:buffer128)\n (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128) (out128_b:buffer128)\n (inout_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (scratch_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (expected_tag:(seq nat8))\n (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (let (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64\n (va_get_reg64 rRsp va_s0 + offset + 0) (va_get_stack va_s0) in let\n (in128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 8) (va_get_stack va_s0) in let (out128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 16) (va_get_stack va_s0) in\n let (len128x6:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 24) (va_get_stack va_s0) in let (in128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 32) (va_get_stack va_s0) in\n let (out128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 40) (va_get_stack va_s0) in let (len128:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 48) (va_get_stack va_s0) in\n let (inout_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 56) (va_get_stack va_s0) in let (plain_num_bytes:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 64) (va_get_stack va_s0) in\n sse_enabled /\\ movbe_enabled /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0)\n Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack\n va_s0) Public (va_get_stackTaint va_s0) /\\ Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64\n rRsp va_s0 + offset + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0) /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) auth_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 7 va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 2 va_s0) (va_get_reg64 rR8 va_s0) iv_b 1\n (va_get_mem_layout va_s0) Public /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 6\n va_s0) in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 
(va_get_mem_heaplet 6 va_s0) out128x6_ptr out128x6_b len128x6\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1\n va_s0) in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) out128_ptr out128_b len128\n (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5\n va_s0) inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.validDstAddrs128\n (va_get_mem_heaplet 3 va_s0) (va_get_reg64 rRbp va_s0) scratch_b 9 (va_get_mem_layout va_s0)\n Secret /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0)\n hkeys_b 8 (va_get_mem_layout va_s0) Secret /\\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b;\n scratch_b; in128x6_b; out128x6_b; hkeys_b; in128_b; out128_b; inout_b]) /\\\n Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; in128x6_b; out128x6_b; in128_b;\n out128_b; inout_b; hkeys_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b;\n hkeys_b; in128_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; hkeys_b;\n out128x6_b; inout_b]) /\\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; hkeys_b;\n out128x6_b; out128_b]) /\\ (Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \\/ in128x6_b\n == out128x6_b) /\\ (Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \\/ in128_b == out128_b)\n /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ in128x6_ptr\n + 16 `op_Multiply` len128x6 < pow2_64 /\\ out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\\\n in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\\ out128_ptr + 16 `op_Multiply` len128 < pow2_64\n /\\ inout_ptr + 16 < pow2_64 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b ==\n va_get_reg64 rRdx va_s0 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1\n /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\\\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 out128_b /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128\n in128x6_b == len128x6 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==\n len128 /\\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\\\n plain_num_bytes < pow2_32 /\\ va_get_reg64 rRsi va_s0 < pow2_32 /\\ va_get_reg64 rR9 va_s0 + 32 <\n pow2_64 /\\ Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem_heaplet 0\n va_s0) + 128 < pow2_64 /\\ len128x6 `op_Modulus` 6 == 0 /\\ (len128x6 > 0 ==> len128x6 >= 6) /\\\n 12 + len128x6 + 6 < pow2_32 /\\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128\n (128 `op_Division` 8) <= plain_num_bytes /\\ plain_num_bytes < va_mul_nat len128x6 (128\n `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\\\n (va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\\\n va_get_reg64 rRsi va_s0 < va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128\n `op_Division` 8) /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rRcx va_s0)\n (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg 
key\n (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\\ (let iv_BE =\n Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem_heaplet 2 va_s0) in let h_LE =\n Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)\n in iv_BE == Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\\ (forall (va_x_mem:vale_heap)\n (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)\n (va_x_rsi:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)\n (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)\n (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)\n (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)\n (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)\n (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_heap1:vale_heap) (va_x_heap2:vale_heap)\n (va_x_heap3:vale_heap) (va_x_heap5:vale_heap) (va_x_heap6:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 6\n va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 3 va_x_heap3\n (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 15 va_x_xmm15\n (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11\n va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8\n (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4\n (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0\n (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13\n (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10\n (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64\n rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx\n va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem\n va_s0))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\\ (let\n (abytes_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 0) (va_get_stack va_s0) in let (in128x6_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 8) (va_get_stack va_s0) in\n let (out128x6_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 16) (va_get_stack va_s0) in let (len128x6:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 24) (va_get_stack va_s0) in\n let (in128_ptr:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0\n + offset + 32) (va_get_stack va_s0) in let (out128_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 40) (va_get_stack va_s0) in\n let (len128:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +\n offset + 48) (va_get_stack va_s0) in let (inout_ptr:Vale.X64.Memory.nat64) =\n Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + offset + 56) (va_get_stack va_s0) in\n let (plain_num_bytes:Vale.X64.Memory.nat64) = Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp\n va_s0 + offset + 64) (va_get_stack va_s0) in Vale.X64.Decls.modifies_buffer128 out128_b\n (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1 va_sM) /\\ 
Vale.X64.Decls.modifies_buffer128\n iv_b (va_get_mem_heaplet 2 va_s0) (va_get_mem_heaplet 2 va_sM) /\\\n Vale.X64.Decls.modifies_buffer128 scratch_b (va_get_mem_heaplet 3 va_s0) (va_get_mem_heaplet 3\n va_sM) /\\ Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0)\n (va_get_mem_heaplet 5 va_sM) /\\ Vale.X64.Decls.modifies_buffer128 out128x6_b\n (va_get_mem_heaplet 6 va_s0) (va_get_mem_heaplet 6 va_sM) /\\ plain_num_bytes < pow2_32 /\\\n va_get_reg64 rRsi va_s0 < pow2_32 /\\ (let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0\n (va_get_mem_heaplet 2 va_s0) in let auth_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) auth_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_s0) abytes_b) in let auth_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n auth_raw_quads) 0 (va_get_reg64 rRsi va_s0) in let plain_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_s0) in128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0)\n in128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) in let plain_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n plain_raw_quads) 0 plain_num_bytes in let cipher_raw_quads = FStar.Seq.Base.append\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 6 va_sM) out128x6_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)\n out128_b)) (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) in let cipher_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes\n cipher_raw_quads) 0 plain_num_bytes in l_and (l_and (l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.nat8 auth_bytes < pow2_32) (FStar.Seq.Base.length #Vale.Def.Types_s.nat8\n plain_bytes < pow2_32)) (cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq\n Vale.Def.Types_s.nat8) #bool (Vale.AES.GCM_s.gcm_decrypt_LE alg\n (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes expected_tag)))\n (Vale.Def.Types_s.le_quad32_to_bytes (va_get_xmm 8 va_sM) == Vale.AES.GCM.gcm_decrypt_LE_tag\n alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes))) ==> va_k\n va_sM (())))\n\n\nval va_wpProof_Gcm_blocks_wrapped : alg:algorithm -> offset:int -> auth_b:buffer128 ->\n abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 -> in128_b:buffer128 ->\n out128_b:buffer128 -> inout_b:buffer128 -> iv_b:buffer128 -> iv:supported_iv_LE ->\n scratch_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> expected_tag:(seq nat8) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_blocks_wrapped alg offset auth_b abytes_b in128x6_b\n out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b\n expected_tag va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_wrapped alg offset)\n ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3;\n va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; 
va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks_wrapped alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b\n inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b expected_tag va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks_wrapped (va_code_Gcm_blocks_wrapped alg offset) va_s0\n alg offset auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key\n round_keys keys_b hkeys_b expected_tag in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 6 va_sM\n (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm\n 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm\n 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM\n (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM\n (va_update_reg64 rRbp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM\n (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem va_sM\n va_s0))))))))))))))))))))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet\n 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;\n va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm\n 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64\n rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64\n rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64\n rRax; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks_wrapped (alg:algorithm) (offset:int) (auth_b:buffer128)\n (abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (in128_b:buffer128)\n (out128_b:buffer128) (inout_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE)\n (scratch_b:buffer128) (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128)\n (hkeys_b:buffer128) (expected_tag:(seq nat8)) : (va_quickCode unit (va_code_Gcm_blocks_wrapped\n alg offset)) =\n (va_QProc (va_code_Gcm_blocks_wrapped alg offset) ([va_Mod_flags; va_Mod_mem_heaplet 6;\n va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;\n va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;\n va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; 
va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64\n rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64\n rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64\n rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks_wrapped alg offset\n auth_b abytes_b in128x6_b out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key round_keys\n keys_b hkeys_b expected_tag) (va_wpProof_Gcm_blocks_wrapped alg offset auth_b abytes_b\n in128x6_b out128x6_b in128_b out128_b inout_b iv_b iv scratch_b key round_keys keys_b hkeys_b\n expected_tag))\n#pop-options\n//--\n//-- Gcm_blocks_decrypt_stdcall\n#push-options \"--z3rlimit 1600\"\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks_decrypt_stdcall win alg =\n (va_Block (va_CCons (va_code_CreateHeaplets ()) (va_CCons (va_Block (va_CNil ())) (va_CCons\n (va_code_Save_registers win) (va_CCons (if win then va_Block (va_CCons (va_code_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRcx)) (va_CCons (va_code_Mov64\n (va_op_dst_opr64_reg64 rRsi) (va_op_opr64_reg64 rRdx)) (va_CCons (va_code_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR8)) (va_CCons (va_code_Mov64\n (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rR9)) (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rR8) (va_op_reg_opr64_reg64 rRsp) (224 + 40 + 0)) (va_CCons\n (va_code_Load64_stack (va_op_dst_opr64_reg64 rR9) (va_op_reg_opr64_reg64 rRsp) (224 + 40 + 8))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRbp) (va_op_reg_opr64_reg64 rRsp) (224\n + 40 + 88)) (va_CNil ())))))))) else va_Block (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64 rRbp) (va_op_reg_opr64_reg64 rRsp) (64 + 8 + 72)) (va_CNil ())))\n (va_CCons (va_code_Gcm_blocks_wrapped alg (Vale.X64.Decls.total_if #int win (224 + 56) (64 +\n 8))) (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rR15) (va_op_reg_opr64_reg64 rRsp)\n (Vale.X64.Decls.total_if #int win (224 + 40 + 96) (64 + 8 + 80))) (va_CCons\n (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 0) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rR15) 0 Secret) (va_CCons (va_code_XmmEqual (va_op_xmm_xmm 0) (va_op_xmm_xmm 8)) (va_CCons\n (va_Block (va_CNil ())) (va_CCons (va_Block (va_CNil ())) (va_CCons (va_code_Mov64\n (va_op_dst_opr64_reg64 rRcx) (va_op_opr64_reg64 rRax)) (va_CCons (va_code_Restore_registers\n win) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRax) (va_op_opr64_reg64 rRcx)) (va_CCons\n (va_code_DestroyHeaplets ()) (va_CNil ()))))))))))))))))\n\nPartial implementation:\nlet va_codegen_success_Gcm_blocks_decrypt_stdcall win alg =\n \n"
},
{
"role": "assistant",
"content": "(va_pbool_and (va_codegen_success_CreateHeaplets ())\n (va_pbool_and (va_codegen_success_Save_registers win)\n (va_pbool_and (if win\n then\n va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRsi)\n (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR8))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRcx)\n (va_op_opr64_reg64 rR9))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rR8\n )\n (va_op_reg_opr64_reg64 rRsp)\n (224 + 40 + 0))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64\n rR9)\n (va_op_reg_opr64_reg64 rRsp)\n (224 + 40 + 8))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64\n rRbp)\n (va_op_reg_opr64_reg64 rRsp)\n (224 + 40 + 88))\n (va_ttrue ())))))))\n else\n va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rRbp)\n (va_op_reg_opr64_reg64 rRsp)\n (64 + 8 + 72))\n (va_ttrue ()))\n (va_pbool_and (va_codegen_success_Gcm_blocks_wrapped alg\n (Vale.X64.Decls.total_if #int win (224 + 56) (64 + 8)))\n (va_pbool_and (va_codegen_success_Load64_stack (va_op_dst_opr64_reg64 rR15)\n (va_op_reg_opr64_reg64 rRsp)\n (Vale.X64.Decls.total_if #int win (224 + 40 + 96) (64 + 8 + 80)))\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 0)\n (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rR15)\n 0\n Secret)\n (va_pbool_and (va_codegen_success_XmmEqual (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 8))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRcx)\n (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Restore_registers win)\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRax\n )\n (va_op_opr64_reg64 rRcx))\n (va_pbool_and (va_codegen_success_DestroyHeaplets ())\n (va_ttrue ()))))))))))))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: Prims.unit\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nlet lift_id_st a wp st (f : ID5.repr a wp)\n : repr a st (lift_id_st_wp wp)\n = fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))\n\nsub_effect ID ~> ST = lift_id_st\n\nlet null #st #a : wp st a =\n fun s0 p -> forall x s1. 
p x s1\n\nlet get #st () : ST st st (fun s0 p -> p s0 s0) =\n ST?.reflect (fun s0 -> (s0, s0))\n\nlet put #st (s:st) : ST unit st (fun _ p -> p () s) =\n ST?.reflect (fun _ -> ((), s))\n\n// this now works!!!\nlet test () : ST int int null =\n let x = get () in\n put (x + x);\n get () + get ()\n\nlet addx (x:int) : ST unit int (fun s0 p -> p () (s0+x)) =\n let y = get () in\n put (x+y)\n\nlet add_via_state (x y : int) : ST int int (fun s0 p -> p (x+y) s0) =\n let o = get () in\n put x;\n addx y;\n let r = get () in\n put o;\n r\n\nPartial implementation:\nlet main =\n \n"
},
{
"role": "assistant",
"content": "let r, n = reify (reify (add_via_state 1 2) 3) (Ghost.hide (fun _ -> True)) () in\nFStar.IO.print_string (FStar.Printf.sprintf \"%d:%d\\n\" r n)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: n: Prims.nat -> k: Prims.nat -> Type0\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2017 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n(*\nA standard library for manipulation of value bytes.\n\nThis model is realized by Bytes.bytes in OCaml and by\nstruct {uintX_t size; char *bytes} (or similar) in C.\n\nThis file is essentially a specialized version of FStar.Seq,\nwith lemmas and refinements taylored for typical operations on\nbytes, and with support for machine integers and C-extractible versions\n(which Seq does not provide.)\n\n@summary Value bytes standard library\n*)\nmodule FStar.Bytes\n\nmodule S = FStar.Seq\nmodule U = FStar.UInt\nmodule U8 = FStar.UInt8\nmodule U16 = FStar.UInt16\nmodule U32 = FStar.UInt32\nmodule U64 = FStar.UInt64\nmodule Str = FStar.String\nmodule Chr = FStar.Char\n\nunfold let u8 = U8.t\nunfold let u16 = U16.t\nunfold let u32 = U32.t\n\n(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *)\nunfold type byte = u8\n\n(** Realized in C by a pair of a length field and uint8_t* in C\n Realized in OCaml by a string *)\nval bytes : t:Type0{hasEq t}\nval len : bytes -> u32\n\nunfold let length b = FStar.UInt32.v (len b)\n\n(** representation for specs that need lemmas not defined here. 
*)\nval reveal:\n bytes\n -> GTot (S.seq byte)\n\nval length_reveal:\n x:bytes\n -> Lemma (ensures (S.length (reveal x) = length x))\n [SMTPatOr [[SMTPat (S.length (reveal x))];\n [SMTPat (len x)]]]\n\nval hide:\n s:S.seq byte{S.length s < pow2 32}\n -> GTot bytes\n\nval hide_reveal:\n x:bytes\n -> Lemma (ensures (hide (reveal x) = x))\n [SMTPat (reveal x)]\n\nval reveal_hide:\n x:S.seq byte{S.length x < pow2 32}\n -> Lemma (ensures (reveal (hide x) == x))\n [SMTPat (hide x)]\n\ntype lbytes (l:nat) = b:bytes{length b = l}\ntype kbytes (k:nat) = b:bytes{length b < pow2 k}\n\nlet lbytes32 (l:UInt32.t) = b:bytes{len b = l}\n\nval empty_bytes : lbytes 0\nval empty_unique:\n b:bytes\n -> Lemma (length b = 0 ==> b = empty_bytes)\n [SMTPat (len b)]\n\n(** If you statically know the length, it is OK to read at arbitrary indexes *)\nval get:\n b:bytes\n -> pos:u32{U32.v pos < length b}\n -> Pure byte\n (requires True)\n (ensures (fun y -> y == S.index (reveal b) (U32.v pos)))\n\nunfold let op_String_Access = get\n\nunfold let index (b:bytes) (i:nat{i < length b}) = get b (U32.uint_to_t i)\n\nlet equal b1 b2 =\n length b1 = length b2 /\\\n (forall (i:u32{U32.v i < length b1}).{:pattern (b1.[i]); (b2.[i])} b1.[i] == b2.[i])\n\nval extensionality:\n b1:bytes\n -> b2:bytes\n -> Lemma (requires (equal b1 b2))\n (ensures (b1 = b2))\n\n(** creating byte values **)\nval create:\n len:u32\n -> v:byte\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == v}\n\nunfold\nlet create_ (n:nat{FStar.UInt.size n U32.n}) v = create (U32.uint_to_t n) v\n\nval init:\n len:u32\n -> f:(i:u32{U32.(i <^ len)} -> byte)\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == f i}\n\n// this is a hack JROESCH\nval abyte (b:byte) : lbytes 1\n (* admit () create 1ul b *)\n\nval twobytes (b:byte*byte) : lbytes 2\n // init 2ul (fun i -> if i = 0ul then fst b else snd b)\n\n(** appending bytes **)\nval append:\n b1:bytes\n -> b2:bytes\n -> Pure bytes\n (requires (UInt.size (length b1 + length b2) U32.n))\n (ensures (fun b -> reveal b == S.append (reveal b1) (reveal b2)))\nunfold let op_At_Bar = append\n\nval slice:\n b:bytes\n -> s:u32\n -> e:u32{U32.(s <=^ e) /\\ U32.v e <= length b}\n -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v e)}\nlet slice_ b (s:nat) (e:nat{s <= e /\\ e <= length b}) = slice b (U32.uint_to_t s) (U32.uint_to_t e)\n\nval sub:\n b:bytes\n -> s:u32\n -> l:u32{U32.v s + U32.v l <= length b}\n -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v s + U32.v l)}\n\nval split:\n b:bytes\n -> k:u32{U32.v k <= length b}\n -> p:(bytes*bytes){\n let x, y = p in\n (reveal x, reveal y) == Seq.split (reveal b) (U32.v k)}\n\nunfold let split_ b (k:nat{FStar.UInt.size k U32.n /\\ k < length b}) = split b (U32.uint_to_t k)\n\nPartial implementation:\nlet fits_in_k_bytes (n k: nat) =\n \n"
},
{
"role": "assistant",
"content": "FStar.UInt.size n (op_Multiply 8 k)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: c: FStar.Tactics.NamedView.comp -> FStar.Pervasives.Native.option Pulse.Syntax.Base.comp\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet 
intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 
#ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) == comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) 
=\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\n\nlet st_equiv_pre (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (pre:term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre)\n = if eq_tm pre (comp_pre c) then d\n else\n let c' = comp_with_pre c pre in\n let (| u_of, pre_typing, x, post_typing |) =\n Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv\n\n\n#push-options \"--z3rlimit_factor 4 --ifuel 2 --fuel 0\"\nlet k_elab_equiv_continuation (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt #ctxt1 #ctxt2:term)\n (k:continuation_elaborator g1 ctxt g2 ctxt1)\n (d:vprop_equiv g2 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt g2 ctxt2 =\n fun post_hint res ->\n let (| st, c, st_d |) = res in\n let st_d : st_typing g2 st c = st_d in\n assert (comp_pre c == ctxt2);\n let st_d' : st_typing g2 st (comp_with_pre c ctxt1) = st_equiv_pre st_d _ (VE_Sym _ _ _ d) in\n k post_hint (| st, _, st_d' |)\n#pop-options\n\nlet vprop_equiv_typing_fwd (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g ctxt p)\n : tot_typing g p tm_vprop \n = let fwd, _ = vprop_equiv_typing d in\n fwd ctxt_typing\n\n#push-options \"--z3rlimit_factor 4 --ifuel 1 --fuel 0\"\nlet k_elab_equiv_prefix\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt2 #ctxt:term)\n (k:continuation_elaborator g1 ctxt1 g2 ctxt)\n (d:vprop_equiv g1 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt2 g2 ctxt =\n fun post_hint res ->\n let framing_token : frame_for_req_in_ctxt g1 ctxt2 ctxt1 = \n let d = VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Trans _ _ _ _ (VE_Unit _ _) d) in\n (| tm_emp, emp_typing, d |)\n in\n let res = k post_hint res in\n let (| st, c, st_d |) = res in\n assert (comp_pre c == ctxt1);\n (| _, _, st_equiv_pre st_d _ d |)\n #pop-options\n\nlet k_elab_equiv\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt1' #ctxt2 #ctxt2':term) \n (k:continuation_elaborator g1 ctxt1 g2 ctxt2)\n (d1:vprop_equiv g1 ctxt1 ctxt1')\n (d2:vprop_equiv g2 ctxt2 ctxt2')\n : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n \n let k : continuation_elaborator g1 ctxt1 g2 ctxt2' =\n k_elab_equiv_continuation k d2 in\n let k : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n k_elab_equiv_prefix k d1 in\n k\n\n#push-options \"--query_stats --fuel 2 --ifuel 2 --split_queries no --z3rlimit_factor 20\"\nopen Pulse.PP\nlet continuation_elaborator_with_bind (#g:env) (ctxt:term)\n (#c1:comp{stateful_comp c1})\n (#e1:st_term)\n (e1_typing:st_typing g e1 c1)\n (ctxt_pre1_typing:tot_typing g (tm_star ctxt (comp_pre c1)) tm_vprop)\n (x:nvar { None? 
(lookup g (snd x)) })\n : T.Tac (continuation_elaborator\n g\n (tm_star ctxt (comp_pre c1))\n (push_binding g (snd x) (fst x) (comp_res c1))\n (tm_star (open_term (comp_post c1) (snd x)) ctxt)) =\n\n let pre1 = comp_pre c1 in\n let res1 = comp_res c1 in\n let post1 = comp_post c1 in\n let ctxt_typing = star_typing_inversion_l ctxt_pre1_typing in\n // let p_prop = Metatheory.pure_typing_inversion pure_typing in\n let v_eq = VE_Comm g ctxt pre1 in\n let framing_token : frame_for_req_in_ctxt g (tm_star ctxt pre1) pre1 = \n (| ctxt, ctxt_typing, VE_Comm g pre1 ctxt |)\n in\n let (| c1, e1_typing |) =\n apply_frame ctxt_pre1_typing e1_typing framing_token in\n let (| u_of_1, pre_typing, _, _ |) =\n Metatheory.(st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness e1_typing))) in\n let b = res1 in\n let ppname, x = x in\n let g' = push_binding g x ppname b in\n \n let post1_opened = open_term_nv post1 (v_as_nv x) in\n let k : continuation_elaborator g (tm_star ctxt pre1) g' (tm_star post1_opened ctxt) =\n fun post_hint res ->\n let (| e2, c2, e2_typing |) = res in\n assert (comp_post_matches_hint c2 post_hint);\n let e2_typing : st_typing g' e2 c2 = e2_typing in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term e2_closed x == e2);\n assert (comp_pre c1 == (tm_star ctxt pre1));\n assert (comp_post c1 == tm_star post1 ctxt);\n assert (comp_pre c2 == tm_star post1_opened ctxt);\n assert (open_term (comp_post c1) x == tm_star post1_opened (open_term ctxt x));\n // ctxt is well-typed, hence ln\n assume (open_term ctxt x == ctxt);\n assert (open_term (comp_post c1) x == comp_pre c2);\n // we closed e2 with x\n assume (~ (x `Set.mem` freevars_st e2_closed));\n if x `Set.mem` freevars (comp_post c2)\n then fail g' None \"Impossible: freevar clash when constructing continuation elaborator for bind, please file a bug-report\"\n else (\n let t_typing, post_typing =\n Pulse.Typing.Combinators.bind_res_and_post_typing g c2 x post_hint in\n let g = push_context g \"mk_bind\" e1.range in\n // info_doc g None\n // [prefix 4 1 (doc_of_string \"mk_bind e1 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e1));\n // prefix 4 1 (doc_of_string \"mk_bind c1 = \") (pp #comp c1);\n // prefix 4 1 (doc_of_string \"mk_bind e2 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e2));\n // prefix 4 1 (doc_of_string \"mk_bind c2 = \") (pp #comp c2)]\n // ;\n let (| e, c, e_typing |) =\n Pulse.Typing.Combinators.mk_bind\n g (tm_star ctxt pre1) \n e1 e2_closed c1 c2 (ppname, x) e1_typing\n u_of_1 \n e2_typing\n t_typing\n post_typing\n (Some? post_hint)\n in\n (| e, c, e_typing |)\n )\n in\n k\n#pop-options\n\nmodule LN = Pulse.Typing.LN\n#push-options \"--z3rlimit_factor 4 --fuel 1 --ifuel 1\"\n\n\nlet st_comp_typing_with_post_hint \n (#g:env) (#ctxt:_)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (post_hint:post_hint_opt g { Some? post_hint })\n (c:comp_st { comp_pre c == ctxt /\\ comp_post_matches_hint c post_hint })\n: st_comp_typing g (st_comp_of_comp c)\n= let st = st_comp_of_comp c in\n let Some ph = post_hint in\n let post_typing_src\n : tot_typing (push_binding ph.g ph.x ppname_default ph.ret_ty)\n (open_term ph.post ph.x) tm_vprop\n = ph.post_typing_src\n in\n let x = fresh g in\n assume (fresh_wrt x g (freevars ph.post));\n assume (None? 
(lookup g ph.x));\n let post_typing_src\n : tot_typing (push_binding ph.g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = if x = ph.x\n then post_typing_src\n else \n let open Pulse.Typing.Metatheory.Base in\n let tt :\n tot_typing\n (push_binding ph.g x ppname_default ph.ret_ty)\n (subst_term (open_term ph.post ph.x) (renaming ph.x x))\n (subst_term tm_vprop (renaming ph.x x)) =\n tot_typing_renaming1 ph.g ph.x ph.ret_ty (open_term ph.post ph.x) tm_vprop post_typing_src x\n in\n assert (subst_term tm_vprop (renaming ph.x x) == tm_vprop);\n assume (subst_term (open_term ph.post ph.x) (renaming ph.x x) ==\n open_term ph.post x);\n tt\n in\n let post_typing_src\n : tot_typing (push_binding g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = //weakening: TODO\n RU.magic ()\n in\n let ty_typing : universe_of ph.g st.res st.u = ph.ty_typing in\n let ty_typing : universe_of g st.res st.u =\n Pulse.Typing.Metatheory.tot_typing_weakening_standard ph.g ty_typing g\n in\n assert (st.res == ph.ret_ty);\n assert (st.post == ph.post);\n STC g st x ty_typing ctxt_typing post_typing_src\n\nlet continuation_elaborator_with_bind_fn (#g:env) (#ctxt:term)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#e1:st_term)\n (#c1:comp { C_Tot? c1 })\n (b:binder{b.binder_ty == comp_res c1})\n (e1_typing:st_typing g e1 c1)\n (x:nvar { None? (lookup g (snd x)) })\n: T.Tac (continuation_elaborator\n g ctxt\n (push_binding g (snd x) ppname_default (comp_res c1)) ctxt)\n= let t1 = comp_res c1 in\n assert ((push_binding g (snd x) (fst x) t1) `env_extends` g);\n fun post_hint (| e2, c2, d2 |) ->\n if None? post_hint then T.fail \"bind_fn: expects the post_hint to be set\";\n let ppname, x = x in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term (close_st_term e2 x) x == e2);\n let e = wr c2 (Tm_Bind {binder=b; head=e1; body=e2_closed}) in\n let (| u, c1_typing |) = Pulse.Typing.Metatheory.Base.st_typing_correctness_ctot e1_typing in\n let c2_typing : comp_typing g c2 (universe_of_comp c2) =\n match c2 with\n | C_ST st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_ST _ _ stc\n \n | C_STAtomic i obs st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n let i_typing = CP.core_check_term g i T.E_Total tm_inames in\n CT_STAtomic _ _ obs _ i_typing stc\n\n | C_STGhost st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_STGhost _ _ stc\n in\n let d : st_typing g e c2 =\n T_BindFn g e1 e2_closed c1 c2 b x e1_typing u c1_typing d2 c2_typing\n in\n (| e, c2, d |)\n\nlet rec check_equiv_emp (g:env) (vp:term)\n : option (vprop_equiv g vp tm_emp)\n = match vp.t with\n | Tm_Emp -> Some (VE_Refl _ _)\n | Tm_Star vp1 vp2 ->\n (match check_equiv_emp g vp1, check_equiv_emp g vp2 with\n | Some d1, Some d2 ->\n let d3 : vprop_equiv g (tm_star vp1 vp2) (tm_star tm_emp tm_emp)\n = VE_Ctxt _ _ _ _ _ d1 d2 in\n let d4 : vprop_equiv g (tm_star tm_emp tm_emp) tm_emp =\n VE_Unit _ _ in\n Some (VE_Trans _ _ _ _ d3 d4)\n | _, _ -> None)\n | _ -> None\n\n\n\nlet emp_inames_included (g:env) (i:term) (_:tot_typing g i tm_inames)\n: prop_validity g (tm_inames_subset tm_emp_inames i)\n= RU.magic()\n\nlet return_in_ctxt (g:env) (y:var) (y_ppname:ppname) (u:universe) (ty:term) (ctxt:vprop)\n (ty_typing:universe_of g ty u)\n (post_hint0:post_hint_opt g { Some? 
post_hint0 /\\ checker_res_matches_post_hint g post_hint0 y ty ctxt})\n: Pure (st_typing_in_ctxt g ctxt post_hint0)\n (requires lookup g y == Some ty)\n (ensures fun _ -> True)\n= let Some post_hint = post_hint0 in\n let x = fresh g in\n assume (~ (x `Set.mem` freevars post_hint.post));\n let ctag =\n match post_hint.effect_annot with\n | EffectAnnotAtomic _ -> STT_Atomic\n | EffectAnnotGhost -> STT_Ghost\n | _ -> STT\n in\n let y_tm = tm_var {nm_index=y;nm_ppname=y_ppname} in\n let d = T_Return g ctag false u ty y_tm post_hint.post x ty_typing\n (RU.magic ()) // that null_var y is well typed at ty in g, we know since lookup g y == Some ty\n (RU.magic ()) // typing of (open post x) in (g, x) ... post_hint is well-typed, so should get\n in\n let t = wtag (Some ctag) (Tm_Return {expected_type=tm_unknown;insert_eq=false;term=y_tm}) in\n let c = comp_return ctag false u ty y_tm post_hint.post x in\n let d : st_typing g t c = d in\n assume (comp_u c == post_hint.u); // this u should follow from equality of t\n match c, post_hint.effect_annot with\n | C_STAtomic _ obs _, EffectAnnotAtomic { opens } ->\n assert (comp_inames c == tm_emp_inames);\n let pht = post_hint_typing g post_hint x in\n let validity = emp_inames_included g opens pht.effect_annot_typing in\n let d = T_Sub _ _ _ _ d (STS_AtomicInvs _ (st_comp_of_comp c) tm_emp_inames opens obs obs validity) in\n (| _, _, d |)\n | _ -> \n (| _, _, d |)\n\nlet match_comp_res_with_post_hint (#g:env) (#t:st_term) (#c:comp_st)\n (d:st_typing g t c)\n (post_hint:post_hint_opt g)\n : T.Tac (t':st_term &\n c':comp_st &\n st_typing g t' c') =\n\n match post_hint with\n | None -> (| t, c, d |)\n | Some { ret_ty } ->\n let cres = comp_res c in\n if eq_tm cres ret_ty\n then (| t, c, d |)\n else match Pulse.Checker.Pure.check_equiv g cres ret_ty with\n | None ->\n fail g (Some t.range)\n (Printf.sprintf \"Could not prove equiv for computed type %s and expected type %s\"\n (P.term_to_string cres)\n (P.term_to_string ret_ty))\n | Some tok ->\n let d_equiv\n : RT.equiv _ (elab_term cres) (elab_term ret_ty) =\n RT.Rel_eq_token _ _ _ (FStar.Squash.return_squash tok) in\n \n let c' = with_st_comp c {(st_comp_of_comp c) with res = ret_ty } in\n let (| cres_typing, cpre_typing, x, cpost_typing |) =\n st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness d)) in\n\n let d_stequiv : st_equiv g c c' =\n ST_VPropEquiv _ c c' _ cpre_typing cres_typing cpost_typing d_equiv (VE_Refl _ _) (VE_Refl _ _)\n in\n\n (| t, c', T_Equiv _ _ _ _ d d_stequiv |)\n\nlet apply_checker_result_k (#g:env) (#ctxt:vprop) (#post_hint:post_hint_for_env g)\n (r:checker_result_t g ctxt (Some post_hint))\n (res_ppname:ppname)\n : T.Tac (st_typing_in_ctxt g ctxt (Some post_hint)) =\n\n // TODO: FIXME add to checker result type?\n let (| y, g1, (| u_ty, ty_y, d_ty_y |), (| pre', _ |), k |) = r in\n\n let (| u_ty_y, d_ty_y |) = Pulse.Checker.Pure.check_universe g1 ty_y in\n\n let d : st_typing_in_ctxt g1 pre' (Some post_hint) =\n return_in_ctxt g1 y res_ppname u_ty_y ty_y pre' d_ty_y (Some post_hint) in\n\n k (Some post_hint) d\n\n#push-options \"--z3rlimit_factor 4 --fuel 0 --ifuel 1 --query_stats\"\nlet checker_result_for_st_typing (#g:env) (#ctxt:vprop) (#post_hint:post_hint_opt g)\n (d:st_typing_in_ctxt g ctxt post_hint)\n (ppname:ppname)\n : T.Tac (checker_result_t g ctxt post_hint) =\n\n let (| t, c, d |) = d in\n \n let x = fresh g in\n\n let g' = push_binding g x ppname (comp_res c) in\n let ctxt' = open_term_nv (comp_post c) (ppname, x) in\n let k\n : 
continuation_elaborator\n g (tm_star tm_emp (comp_pre c))\n g' (tm_star ctxt' tm_emp) =\n continuation_elaborator_with_bind tm_emp d (RU.magic ()) (ppname, x) in\n let k\n : continuation_elaborator g (comp_pre c) g' ctxt' =\n k_elab_equiv k (RU.magic ()) (RU.magic ()) in\n\n let _ : squash (checker_res_matches_post_hint g post_hint x (comp_res c) ctxt') =\n match post_hint with\n | None -> ()\n | Some post_hint -> () in\n\n assert (g' `env_extends` g);\n\n let comp_res_typing, _, f =\n Metatheory.(st_comp_typing_inversion_cofinite (fst <| comp_typing_inversion (st_typing_correctness d))) in\n\n // RU.magic is the typing of comp_res in g'\n // weaken comp_res_typing\n\n assume (~ (x `Set.mem` freevars (comp_post c)));\n let tt : universe_of _ _ _ = RU.magic () in\n (| x, g', (| comp_u c, comp_res c, tt |), (| ctxt', f x |), k |)\n#pop-options\n\nmodule R = FStar.Reflection.V2\n\nPartial implementation:\nlet readback_comp_res_as_comp (c: T.comp) : option comp =\n \n"
},
{
"role": "assistant",
"content": "match c with\n| T.C_Total t ->\n (match readback_comp t with\n | None -> None\n | Some c -> Some c)\n| _ -> None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: b1: FStar.Bytes.minbytes n -> b2: FStar.Bytes.minbytes n\n -> b: FStar.Bytes.bytes{FStar.Bytes.len b = FStar.UInt32.uint_to_t n}\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2017 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n(*\nA standard library for manipulation of value bytes.\n\nThis model is realized by Bytes.bytes in OCaml and by\nstruct {uintX_t size; char *bytes} (or similar) in C.\n\nThis file is essentially a specialized version of FStar.Seq,\nwith lemmas and refinements taylored for typical operations on\nbytes, and with support for machine integers and C-extractible versions\n(which Seq does not provide.)\n\n@summary Value bytes standard library\n*)\nmodule FStar.Bytes\n\nmodule S = FStar.Seq\nmodule U = FStar.UInt\nmodule U8 = FStar.UInt8\nmodule U16 = FStar.UInt16\nmodule U32 = FStar.UInt32\nmodule U64 = FStar.UInt64\nmodule Str = FStar.String\nmodule Chr = FStar.Char\n\nunfold let u8 = U8.t\nunfold let u16 = U16.t\nunfold let u32 = U32.t\n\n(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *)\nunfold type byte = u8\n\n(** Realized in C by a pair of a length field and uint8_t* in C\n Realized in OCaml by a string *)\nval bytes : t:Type0{hasEq t}\nval len : bytes -> u32\n\nunfold let length b = FStar.UInt32.v (len b)\n\n(** representation for specs that need lemmas not defined here. 
*)\nval reveal:\n bytes\n -> GTot (S.seq byte)\n\nval length_reveal:\n x:bytes\n -> Lemma (ensures (S.length (reveal x) = length x))\n [SMTPatOr [[SMTPat (S.length (reveal x))];\n [SMTPat (len x)]]]\n\nval hide:\n s:S.seq byte{S.length s < pow2 32}\n -> GTot bytes\n\nval hide_reveal:\n x:bytes\n -> Lemma (ensures (hide (reveal x) = x))\n [SMTPat (reveal x)]\n\nval reveal_hide:\n x:S.seq byte{S.length x < pow2 32}\n -> Lemma (ensures (reveal (hide x) == x))\n [SMTPat (hide x)]\n\ntype lbytes (l:nat) = b:bytes{length b = l}\ntype kbytes (k:nat) = b:bytes{length b < pow2 k}\n\nlet lbytes32 (l:UInt32.t) = b:bytes{len b = l}\n\nval empty_bytes : lbytes 0\nval empty_unique:\n b:bytes\n -> Lemma (length b = 0 ==> b = empty_bytes)\n [SMTPat (len b)]\n\n(** If you statically know the length, it is OK to read at arbitrary indexes *)\nval get:\n b:bytes\n -> pos:u32{U32.v pos < length b}\n -> Pure byte\n (requires True)\n (ensures (fun y -> y == S.index (reveal b) (U32.v pos)))\n\nunfold let op_String_Access = get\n\nunfold let index (b:bytes) (i:nat{i < length b}) = get b (U32.uint_to_t i)\n\nlet equal b1 b2 =\n length b1 = length b2 /\\\n (forall (i:u32{U32.v i < length b1}).{:pattern (b1.[i]); (b2.[i])} b1.[i] == b2.[i])\n\nval extensionality:\n b1:bytes\n -> b2:bytes\n -> Lemma (requires (equal b1 b2))\n (ensures (b1 = b2))\n\n(** creating byte values **)\nval create:\n len:u32\n -> v:byte\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == v}\n\nunfold\nlet create_ (n:nat{FStar.UInt.size n U32.n}) v = create (U32.uint_to_t n) v\n\nval init:\n len:u32\n -> f:(i:u32{U32.(i <^ len)} -> byte)\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == f i}\n\n// this is a hack JROESCH\nval abyte (b:byte) : lbytes 1\n (* admit () create 1ul b *)\n\nval twobytes (b:byte*byte) : lbytes 2\n // init 2ul (fun i -> if i = 0ul then fst b else snd b)\n\n(** appending bytes **)\nval append:\n b1:bytes\n -> b2:bytes\n -> Pure bytes\n (requires (UInt.size (length b1 + length b2) U32.n))\n (ensures (fun b -> reveal b == S.append (reveal b1) (reveal b2)))\nunfold let op_At_Bar = append\n\nval slice:\n b:bytes\n -> s:u32\n -> e:u32{U32.(s <=^ e) /\\ U32.v e <= length b}\n -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v e)}\nlet slice_ b (s:nat) (e:nat{s <= e /\\ e <= length b}) = slice b (U32.uint_to_t s) (U32.uint_to_t e)\n\nval sub:\n b:bytes\n -> s:u32\n -> l:u32{U32.v s + U32.v l <= length b}\n -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v s + U32.v l)}\n\nval split:\n b:bytes\n -> k:u32{U32.v k <= length b}\n -> p:(bytes*bytes){\n let x, y = p in\n (reveal x, reveal y) == Seq.split (reveal b) (U32.v k)}\n\nunfold let split_ b (k:nat{FStar.UInt.size k U32.n /\\ k < length b}) = split b (U32.uint_to_t k)\n\n(** Interpret a sequence of bytes as a mathematical integer encoded in big endian **)\nlet fits_in_k_bytes (n:nat) (k:nat) = FStar.UInt.size n (op_Multiply 8 k)\ntype uint_k (k:nat) = n:nat{fits_in_k_bytes n k}\n\n(** repr_bytes n: The number of bytes needed to represent a nat **)\nval repr_bytes:\n n:nat\n -> k:pos{fits_in_k_bytes n k}\n\nval lemma_repr_bytes_values:\n n:nat\n -> Lemma (ensures ( let k = repr_bytes n in\n if n < 256 then k==1\n else if n < 65536 then k==2\n else if n < 16777216 then k==3\n else if n < 4294967296 then k==4\n else if n < 1099511627776 then k==5\n else if n < 281474976710656 then k==6\n else if n < 72057594037927936 then k==7\n else if n < 18446744073709551616 then k==8\n else True ))\n [SMTPat 
(repr_bytes n)]\n\nval repr_bytes_size:\n k:nat\n -> n:uint_k k\n -> Lemma (ensures (repr_bytes n <= k))\n [SMTPat (fits_in_k_bytes n k)]\n\nval int_of_bytes:\n b:bytes\n -> Tot (uint_k (length b))\n\nval bytes_of_int:\n k:nat\n -> n:nat{repr_bytes n <= k /\\ k < pow2 32}\n -> lbytes k\n\nval int_of_bytes_of_int:\n #k:nat{k <= 32}\n -> n:uint_k k\n -> Lemma (ensures (int_of_bytes (bytes_of_int k n) == n))\n [SMTPat (bytes_of_int k n)]\n\nval bytes_of_int_of_bytes:\n b:bytes{length b <= 32}\n -> Lemma (ensures (bytes_of_int (length b) (int_of_bytes b) == b))\n [SMTPat (int_of_bytes b)]\n\n//18-02-25 use [uint32] instead of [int32] etc?\nval int32_of_bytes:\n b:bytes{length b <= 4}\n -> n:u32{U32.v n == int_of_bytes b}\n\nval int16_of_bytes:\n b:bytes{length b <= 2}\n -> n:u16{U16.v n == int_of_bytes b}\n\nval int8_of_bytes:\n b:bytes{length b = 1}\n -> n:u8{U8.v n = int_of_bytes b}\n\nval bytes_of_int32:\n n:U32.t\n -> b:lbytes 4{b == bytes_of_int 4 (U32.v n)}\n\nval bytes_of_int16:\n n:U16.t\n -> b:lbytes 2{b == bytes_of_int 2 (U16.v n)}\n\nval bytes_of_int8:\n n:U8.t\n -> b:lbytes 1{b == bytes_of_int 1 (U8.v n)}\n\n////////////////////////////////////////////////////////////////////////////////\ntype minbytes (n:nat) = b:bytes{length b >= n}\n\nval xor:\n n:u32\n -> b1:minbytes (U32.v n)\n -> b2:minbytes (U32.v n)\n -> b:bytes{len b = n}\n\nPartial implementation:\nlet xor_ (#n: nat{FStar.UInt.size n U32.n}) (b1 b2: minbytes n) =\n \n"
},
{
"role": "assistant",
"content": "xor (U32.uint_to_t n) b1 b2"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: b: FStar.Bytes.bytes -> k: Prims.nat{FStar.UInt.size k FStar.UInt32.n /\\ k < FStar.Bytes.length b}\n -> p:\n (FStar.Bytes.bytes * FStar.Bytes.bytes)\n { let _ = p in\n (let FStar.Pervasives.Native.Mktuple2 #_ #_ x y = _ in\n (FStar.Bytes.reveal x,\n FStar.Bytes.reveal y) ==\n FStar.Seq.Properties.split (FStar.Bytes.reveal b)\n (FStar.UInt32.v (FStar.UInt32.uint_to_t k)))\n <:\n Type0 }\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2008-2017 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n(*\nA standard library for manipulation of value bytes.\n\nThis model is realized by Bytes.bytes in OCaml and by\nstruct {uintX_t size; char *bytes} (or similar) in C.\n\nThis file is essentially a specialized version of FStar.Seq,\nwith lemmas and refinements taylored for typical operations on\nbytes, and with support for machine integers and C-extractible versions\n(which Seq does not provide.)\n\n@summary Value bytes standard library\n*)\nmodule FStar.Bytes\n\nmodule S = FStar.Seq\nmodule U = FStar.UInt\nmodule U8 = FStar.UInt8\nmodule U16 = FStar.UInt16\nmodule U32 = FStar.UInt32\nmodule U64 = FStar.UInt64\nmodule Str = FStar.String\nmodule Chr = FStar.Char\n\nunfold let u8 = U8.t\nunfold let u16 = U16.t\nunfold let u32 = U32.t\n\n(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *)\nunfold type byte = u8\n\n(** Realized in C by a pair of a length field and uint8_t* in C\n Realized in OCaml by a string *)\nval bytes : t:Type0{hasEq t}\nval len : bytes -> u32\n\nunfold let length b = FStar.UInt32.v (len b)\n\n(** representation for specs that need lemmas not defined here. 
*)\nval reveal:\n bytes\n -> GTot (S.seq byte)\n\nval length_reveal:\n x:bytes\n -> Lemma (ensures (S.length (reveal x) = length x))\n [SMTPatOr [[SMTPat (S.length (reveal x))];\n [SMTPat (len x)]]]\n\nval hide:\n s:S.seq byte{S.length s < pow2 32}\n -> GTot bytes\n\nval hide_reveal:\n x:bytes\n -> Lemma (ensures (hide (reveal x) = x))\n [SMTPat (reveal x)]\n\nval reveal_hide:\n x:S.seq byte{S.length x < pow2 32}\n -> Lemma (ensures (reveal (hide x) == x))\n [SMTPat (hide x)]\n\ntype lbytes (l:nat) = b:bytes{length b = l}\ntype kbytes (k:nat) = b:bytes{length b < pow2 k}\n\nlet lbytes32 (l:UInt32.t) = b:bytes{len b = l}\n\nval empty_bytes : lbytes 0\nval empty_unique:\n b:bytes\n -> Lemma (length b = 0 ==> b = empty_bytes)\n [SMTPat (len b)]\n\n(** If you statically know the length, it is OK to read at arbitrary indexes *)\nval get:\n b:bytes\n -> pos:u32{U32.v pos < length b}\n -> Pure byte\n (requires True)\n (ensures (fun y -> y == S.index (reveal b) (U32.v pos)))\n\nunfold let op_String_Access = get\n\nunfold let index (b:bytes) (i:nat{i < length b}) = get b (U32.uint_to_t i)\n\nlet equal b1 b2 =\n length b1 = length b2 /\\\n (forall (i:u32{U32.v i < length b1}).{:pattern (b1.[i]); (b2.[i])} b1.[i] == b2.[i])\n\nval extensionality:\n b1:bytes\n -> b2:bytes\n -> Lemma (requires (equal b1 b2))\n (ensures (b1 = b2))\n\n(** creating byte values **)\nval create:\n len:u32\n -> v:byte\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == v}\n\nunfold\nlet create_ (n:nat{FStar.UInt.size n U32.n}) v = create (U32.uint_to_t n) v\n\nval init:\n len:u32\n -> f:(i:u32{U32.(i <^ len)} -> byte)\n -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == f i}\n\n// this is a hack JROESCH\nval abyte (b:byte) : lbytes 1\n (* admit () create 1ul b *)\n\nval twobytes (b:byte*byte) : lbytes 2\n // init 2ul (fun i -> if i = 0ul then fst b else snd b)\n\n(** appending bytes **)\nval append:\n b1:bytes\n -> b2:bytes\n -> Pure bytes\n (requires (UInt.size (length b1 + length b2) U32.n))\n (ensures (fun b -> reveal b == S.append (reveal b1) (reveal b2)))\nunfold let op_At_Bar = append\n\nval slice:\n b:bytes\n -> s:u32\n -> e:u32{U32.(s <=^ e) /\\ U32.v e <= length b}\n -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v e)}\nlet slice_ b (s:nat) (e:nat{s <= e /\\ e <= length b}) = slice b (U32.uint_to_t s) (U32.uint_to_t e)\n\nval sub:\n b:bytes\n -> s:u32\n -> l:u32{U32.v s + U32.v l <= length b}\n -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v s + U32.v l)}\n\nval split:\n b:bytes\n -> k:u32{U32.v k <= length b}\n -> p:(bytes*bytes){\n let x, y = p in\n (reveal x, reveal y) == Seq.split (reveal b) (U32.v k)}\n\nPartial implementation:\nlet split_ b (k: nat{FStar.UInt.size k U32.n /\\ k < length b}) =\n \n"
},
{
"role": "assistant",
"content": "split b (U32.uint_to_t k)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: s: st -> DM4F_layered5.ST Prims.unit\nEffect: DM4F_layered5.ST\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nlet lift_id_st a wp st (f : ID5.repr a wp)\n : repr a st (lift_id_st_wp wp)\n = fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))\n\nsub_effect ID ~> ST = lift_id_st\n\nlet null #st #a : wp st a =\n fun s0 p -> forall x s1. 
p x s1\n\nlet get #st () : ST st st (fun s0 p -> p s0 s0) =\n ST?.reflect (fun s0 -> (s0, s0))\n\nPartial implementation:\nlet put #st (s: st) : ST unit st (fun _ p -> p () s) =\n \n"
},
{
"role": "assistant",
"content": "ST?.reflect (fun _ -> ((), s))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: _: Prims.unit -> DM4F_layered5.ST Prims.int\nEffect: DM4F_layered5.ST\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nlet lift_id_st a wp st (f : ID5.repr a wp)\n : repr a st (lift_id_st_wp wp)\n = fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))\n\nsub_effect ID ~> ST = lift_id_st\n\nlet null #st #a : wp st a =\n fun s0 p -> forall x s1. 
p x s1\n\nlet get #st () : ST st st (fun s0 p -> p s0 s0) =\n ST?.reflect (fun s0 -> (s0, s0))\n\nlet put #st (s:st) : ST unit st (fun _ p -> p () s) =\n ST?.reflect (fun _ -> ((), s))\n\nPartial implementation:\nlet test () : ST int int null =\n \n"
},
{
"role": "assistant",
"content": "let x = get () in\nput (x + x);\nget () + get ()"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n d: Pulse.Typing.st_typing g t c ->\n post:\n Pulse.Syntax.Base.term\n {Pulse.Syntax.Base.comp_post c == Pulse.Syntax.Base.tm_star post Pulse.Syntax.Base.tm_emp}\n -> Pulse.Typing.st_typing g t (Pulse.Checker.Base.comp_st_with_post c post)\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n 
match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : 
continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nPartial implementation:\nlet simplify_post\n (#g: env)\n (#t: st_term)\n (#c: comp_st)\n (d: st_typing g t c)\n (post: term{comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post) =\n \n"
},
{
"role": "assistant",
"content": "st_equiv_post d\n post\n (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src: Vale.X64.Machine_s.nat64 -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nPartial implementation:\nlet eval_Mov64 (src: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "Some src"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n va_mods: Vale.X64.QuickCode.va_mods_t ->\n alg: Vale.AES.AES_common_s.algorithm ->\n inout_b: Vale.X64.Memory.buffer128 ->\n key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->\n round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n keys_b: Vale.X64.Memory.buffer128 ->\n hkeys_b: Vale.X64.Memory.buffer128 ->\n total_bytes: Prims.nat ->\n old_hash: Vale.X64.Decls.quad32 ->\n completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n h_LE: Vale.X64.Decls.quad32\n -> Vale.X64.QuickCode.va_quickCode Prims.unit\n (Vale.AES.X64.GCMdecryptOpt.va_code_Gcm_extra_bytes alg)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\nPartial implementation:\nlet va_qcode_Gcm_extra_bytes\n 
(va_mods: va_mods_t)\n (alg: algorithm)\n (inout_b: buffer128)\n (key: (seq nat32))\n (round_keys: (seq quad32))\n (keys_b hkeys_b: buffer128)\n (total_bytes: nat)\n (old_hash: quad32)\n (completed_quads: (seq quad32))\n (h_LE: quad32)\n : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n \n"
},
{
"role": "assistant",
"content": "(qblock va_mods\n (fun (va_s: va_state) ->\n let va_old_s:va_state = va_s in\n let len:(va_int_range 1 1) = 1 in\n va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5)\n (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax)\n 0\n Secret\n inout_b\n 0)\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0))\n (fun (va_s: va_state) _ ->\n let hash_input:quad32 = va_get_xmm 0 va_s in\n va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads)\n (fun (va_s: va_state) _ ->\n va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b)\n (FStar.Seq.Base.create #quad32 1 hash_input))\n (let snap:(FStar.Seq.Base.seq Vale.X64.Decls.quad32) =\n Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b\n in\n va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9))\n (fun (va_s: va_state) _ ->\n va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11\n va_s))\n key\n round_keys\n keys_b)\n (fun (va_s: va_state) _ ->\n va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_: unit) ->\n Vale.AES.AES_s.aes_encrypt_LE_reveal ())\n (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0))\n (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet\n 5)\n (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10)\n 0\n Secret\n inout_b\n 0)\n (fun (va_s: va_state) _ ->\n va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_: unit) ->\n Vale.AES.GCTR.gctr_partial_reveal ()\n )\n (va_QEmpty 
(()))))))))))))))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nPartial implementation:\nlet eval_Bswap64 (dst: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "Some (reverse_bytes_nat64 dst)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: x: Prims.int -> DM4F_layered5.ST Prims.unit\nEffect: DM4F_layered5.ST\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nlet lift_id_st a wp st (f : ID5.repr a wp)\n : repr a st (lift_id_st_wp wp)\n = fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))\n\nsub_effect ID ~> ST = lift_id_st\n\nlet null #st #a : wp st a =\n fun s0 p -> forall x s1. 
p x s1\n\nlet get #st () : ST st st (fun s0 p -> p s0 s0) =\n ST?.reflect (fun s0 -> (s0, s0))\n\nlet put #st (s:st) : ST unit st (fun _ p -> p () s) =\n ST?.reflect (fun _ -> ((), s))\n\n// this now works!!!\nlet test () : ST int int null =\n let x = get () in\n put (x + x);\n get () + get ()\n\nPartial implementation:\nlet addx (x: int) : ST unit int (fun s0 p -> p () (s0 + x)) =\n \n"
},
{
"role": "assistant",
"content": "let y = get () in\nput (x + y)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: _: Prims.unit -> DM4F_layered5.ST st\nEffect: DM4F_layered5.ST\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nlet lift_id_st a wp st (f : ID5.repr a wp)\n : repr a st (lift_id_st_wp wp)\n = fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))\n\nsub_effect ID ~> ST = lift_id_st\n\nlet null #st #a : wp st a =\n fun s0 p -> forall x s1. 
p x s1\n\nPartial implementation:\nlet get #st () : ST st st (fun s0 p -> p s0 s0) =\n \n"
},
{
"role": "assistant",
"content": "ST?.reflect (fun s0 -> (s0, s0))"
}
] |
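The `ST?.reflect` answer in the row above wraps a pure state-passing function in the `DM4F_layered5.ST` effect. As a point of reference, here is a minimal sketch of that underlying representation in plain F*, using only Prims tuples; the names `get_pure` and `put_pure` are illustrative and are not taken from the dataset or from `DM4F_layered5`.

```fstar
(* Sketch: the state-passing shape `st -> (a & st)` that ST?.reflect wraps.
   Illustrative names only; nothing here depends on ID5 or DM4F_Utils. *)
let get_pure (#st:Type) : st -> (st & st) = fun s0 -> (s0, s0)
let put_pure (#st:Type) (s:st) : st -> (unit & st) = fun _ -> ((), s)
```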
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64 -> carry: Prims.bool\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nPartial implementation:\nlet eval_Cmovc64 (dst src: nat64) (carry: bool) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "Some (if carry then src else dst)"
}
] |
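The conditional-move evaluator in the row above is just a guarded select. A standalone restatement of that shape over plain integers follows; `cmov_select` is a hypothetical name with no dependence on Vale.X64's `nat64` or `instr_dep` machinery.

```fstar
(* Illustrative only: the select pattern behind eval_Cmovc64,
   detached from the Vale machine model. *)
let cmov_select (dst src:int) (carry:bool) : option int =
  Some (if carry then src else dst)
```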
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n r: Pulse.Checker.Base.checker_result_t g ctxt (FStar.Pervasives.Native.Some post_hint) ->\n res_ppname: Pulse.Syntax.Base.ppname\n -> FStar.Tactics.Effect.Tac\n (Pulse.Typing.Combinators.st_typing_in_ctxt g ctxt (FStar.Pervasives.Native.Some post_hint))\nEffect: FStar.Tactics.Effect.Tac\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac 
(effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) 
(ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) == comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g 
ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) =\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\n\nlet st_equiv_pre (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (pre:term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre)\n = if eq_tm pre (comp_pre c) then d\n else\n let c' = comp_with_pre c pre in\n let (| u_of, pre_typing, x, post_typing |) =\n Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv\n\n\n#push-options \"--z3rlimit_factor 4 --ifuel 2 --fuel 0\"\nlet k_elab_equiv_continuation (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt #ctxt1 #ctxt2:term)\n (k:continuation_elaborator g1 ctxt g2 ctxt1)\n (d:vprop_equiv g2 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt g2 ctxt2 =\n fun post_hint res ->\n let (| st, c, st_d |) = res in\n let st_d : st_typing g2 st c = st_d in\n assert (comp_pre c == ctxt2);\n let st_d' : st_typing g2 st (comp_with_pre c ctxt1) = st_equiv_pre st_d _ (VE_Sym _ _ _ d) in\n k post_hint (| st, _, st_d' |)\n#pop-options\n\nlet vprop_equiv_typing_fwd (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g ctxt p)\n : tot_typing g p tm_vprop \n = let fwd, _ = vprop_equiv_typing d in\n fwd ctxt_typing\n\n#push-options \"--z3rlimit_factor 4 --ifuel 1 --fuel 0\"\nlet k_elab_equiv_prefix\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt2 #ctxt:term)\n (k:continuation_elaborator g1 ctxt1 g2 ctxt)\n (d:vprop_equiv g1 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt2 g2 ctxt =\n fun post_hint res ->\n let framing_token : frame_for_req_in_ctxt g1 ctxt2 ctxt1 = \n let d = VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Trans _ _ _ _ (VE_Unit _ _) d) in\n (| tm_emp, emp_typing, d |)\n in\n let res = k post_hint res in\n let (| st, c, st_d |) = res in\n assert (comp_pre c == ctxt1);\n (| _, _, st_equiv_pre st_d _ d |)\n #pop-options\n\nlet k_elab_equiv\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt1' #ctxt2 #ctxt2':term) \n (k:continuation_elaborator g1 ctxt1 g2 ctxt2)\n (d1:vprop_equiv g1 ctxt1 ctxt1')\n (d2:vprop_equiv g2 ctxt2 ctxt2')\n : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n \n let k : continuation_elaborator g1 ctxt1 g2 ctxt2' =\n k_elab_equiv_continuation k d2 in\n let k : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n k_elab_equiv_prefix k d1 in\n k\n\n#push-options \"--query_stats --fuel 2 --ifuel 2 --split_queries no --z3rlimit_factor 20\"\nopen Pulse.PP\nlet continuation_elaborator_with_bind (#g:env) (ctxt:term)\n (#c1:comp{stateful_comp c1})\n (#e1:st_term)\n (e1_typing:st_typing g e1 c1)\n (ctxt_pre1_typing:tot_typing g (tm_star ctxt (comp_pre c1)) tm_vprop)\n (x:nvar { None? 
(lookup g (snd x)) })\n : T.Tac (continuation_elaborator\n g\n (tm_star ctxt (comp_pre c1))\n (push_binding g (snd x) (fst x) (comp_res c1))\n (tm_star (open_term (comp_post c1) (snd x)) ctxt)) =\n\n let pre1 = comp_pre c1 in\n let res1 = comp_res c1 in\n let post1 = comp_post c1 in\n let ctxt_typing = star_typing_inversion_l ctxt_pre1_typing in\n // let p_prop = Metatheory.pure_typing_inversion pure_typing in\n let v_eq = VE_Comm g ctxt pre1 in\n let framing_token : frame_for_req_in_ctxt g (tm_star ctxt pre1) pre1 = \n (| ctxt, ctxt_typing, VE_Comm g pre1 ctxt |)\n in\n let (| c1, e1_typing |) =\n apply_frame ctxt_pre1_typing e1_typing framing_token in\n let (| u_of_1, pre_typing, _, _ |) =\n Metatheory.(st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness e1_typing))) in\n let b = res1 in\n let ppname, x = x in\n let g' = push_binding g x ppname b in\n \n let post1_opened = open_term_nv post1 (v_as_nv x) in\n let k : continuation_elaborator g (tm_star ctxt pre1) g' (tm_star post1_opened ctxt) =\n fun post_hint res ->\n let (| e2, c2, e2_typing |) = res in\n assert (comp_post_matches_hint c2 post_hint);\n let e2_typing : st_typing g' e2 c2 = e2_typing in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term e2_closed x == e2);\n assert (comp_pre c1 == (tm_star ctxt pre1));\n assert (comp_post c1 == tm_star post1 ctxt);\n assert (comp_pre c2 == tm_star post1_opened ctxt);\n assert (open_term (comp_post c1) x == tm_star post1_opened (open_term ctxt x));\n // ctxt is well-typed, hence ln\n assume (open_term ctxt x == ctxt);\n assert (open_term (comp_post c1) x == comp_pre c2);\n // we closed e2 with x\n assume (~ (x `Set.mem` freevars_st e2_closed));\n if x `Set.mem` freevars (comp_post c2)\n then fail g' None \"Impossible: freevar clash when constructing continuation elaborator for bind, please file a bug-report\"\n else (\n let t_typing, post_typing =\n Pulse.Typing.Combinators.bind_res_and_post_typing g c2 x post_hint in\n let g = push_context g \"mk_bind\" e1.range in\n // info_doc g None\n // [prefix 4 1 (doc_of_string \"mk_bind e1 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e1));\n // prefix 4 1 (doc_of_string \"mk_bind c1 = \") (pp #comp c1);\n // prefix 4 1 (doc_of_string \"mk_bind e2 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e2));\n // prefix 4 1 (doc_of_string \"mk_bind c2 = \") (pp #comp c2)]\n // ;\n let (| e, c, e_typing |) =\n Pulse.Typing.Combinators.mk_bind\n g (tm_star ctxt pre1) \n e1 e2_closed c1 c2 (ppname, x) e1_typing\n u_of_1 \n e2_typing\n t_typing\n post_typing\n (Some? post_hint)\n in\n (| e, c, e_typing |)\n )\n in\n k\n#pop-options\n\nmodule LN = Pulse.Typing.LN\n#push-options \"--z3rlimit_factor 4 --fuel 1 --ifuel 1\"\n\n\nlet st_comp_typing_with_post_hint \n (#g:env) (#ctxt:_)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (post_hint:post_hint_opt g { Some? post_hint })\n (c:comp_st { comp_pre c == ctxt /\\ comp_post_matches_hint c post_hint })\n: st_comp_typing g (st_comp_of_comp c)\n= let st = st_comp_of_comp c in\n let Some ph = post_hint in\n let post_typing_src\n : tot_typing (push_binding ph.g ph.x ppname_default ph.ret_ty)\n (open_term ph.post ph.x) tm_vprop\n = ph.post_typing_src\n in\n let x = fresh g in\n assume (fresh_wrt x g (freevars ph.post));\n assume (None? 
(lookup g ph.x));\n let post_typing_src\n : tot_typing (push_binding ph.g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = if x = ph.x\n then post_typing_src\n else \n let open Pulse.Typing.Metatheory.Base in\n let tt :\n tot_typing\n (push_binding ph.g x ppname_default ph.ret_ty)\n (subst_term (open_term ph.post ph.x) (renaming ph.x x))\n (subst_term tm_vprop (renaming ph.x x)) =\n tot_typing_renaming1 ph.g ph.x ph.ret_ty (open_term ph.post ph.x) tm_vprop post_typing_src x\n in\n assert (subst_term tm_vprop (renaming ph.x x) == tm_vprop);\n assume (subst_term (open_term ph.post ph.x) (renaming ph.x x) ==\n open_term ph.post x);\n tt\n in\n let post_typing_src\n : tot_typing (push_binding g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = //weakening: TODO\n RU.magic ()\n in\n let ty_typing : universe_of ph.g st.res st.u = ph.ty_typing in\n let ty_typing : universe_of g st.res st.u =\n Pulse.Typing.Metatheory.tot_typing_weakening_standard ph.g ty_typing g\n in\n assert (st.res == ph.ret_ty);\n assert (st.post == ph.post);\n STC g st x ty_typing ctxt_typing post_typing_src\n\nlet continuation_elaborator_with_bind_fn (#g:env) (#ctxt:term)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#e1:st_term)\n (#c1:comp { C_Tot? c1 })\n (b:binder{b.binder_ty == comp_res c1})\n (e1_typing:st_typing g e1 c1)\n (x:nvar { None? (lookup g (snd x)) })\n: T.Tac (continuation_elaborator\n g ctxt\n (push_binding g (snd x) ppname_default (comp_res c1)) ctxt)\n= let t1 = comp_res c1 in\n assert ((push_binding g (snd x) (fst x) t1) `env_extends` g);\n fun post_hint (| e2, c2, d2 |) ->\n if None? post_hint then T.fail \"bind_fn: expects the post_hint to be set\";\n let ppname, x = x in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term (close_st_term e2 x) x == e2);\n let e = wr c2 (Tm_Bind {binder=b; head=e1; body=e2_closed}) in\n let (| u, c1_typing |) = Pulse.Typing.Metatheory.Base.st_typing_correctness_ctot e1_typing in\n let c2_typing : comp_typing g c2 (universe_of_comp c2) =\n match c2 with\n | C_ST st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_ST _ _ stc\n \n | C_STAtomic i obs st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n let i_typing = CP.core_check_term g i T.E_Total tm_inames in\n CT_STAtomic _ _ obs _ i_typing stc\n\n | C_STGhost st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_STGhost _ _ stc\n in\n let d : st_typing g e c2 =\n T_BindFn g e1 e2_closed c1 c2 b x e1_typing u c1_typing d2 c2_typing\n in\n (| e, c2, d |)\n\nlet rec check_equiv_emp (g:env) (vp:term)\n : option (vprop_equiv g vp tm_emp)\n = match vp.t with\n | Tm_Emp -> Some (VE_Refl _ _)\n | Tm_Star vp1 vp2 ->\n (match check_equiv_emp g vp1, check_equiv_emp g vp2 with\n | Some d1, Some d2 ->\n let d3 : vprop_equiv g (tm_star vp1 vp2) (tm_star tm_emp tm_emp)\n = VE_Ctxt _ _ _ _ _ d1 d2 in\n let d4 : vprop_equiv g (tm_star tm_emp tm_emp) tm_emp =\n VE_Unit _ _ in\n Some (VE_Trans _ _ _ _ d3 d4)\n | _, _ -> None)\n | _ -> None\n\n\n\nlet emp_inames_included (g:env) (i:term) (_:tot_typing g i tm_inames)\n: prop_validity g (tm_inames_subset tm_emp_inames i)\n= RU.magic()\n\nlet return_in_ctxt (g:env) (y:var) (y_ppname:ppname) (u:universe) (ty:term) (ctxt:vprop)\n (ty_typing:universe_of g ty u)\n (post_hint0:post_hint_opt g { Some? 
post_hint0 /\\ checker_res_matches_post_hint g post_hint0 y ty ctxt})\n: Pure (st_typing_in_ctxt g ctxt post_hint0)\n (requires lookup g y == Some ty)\n (ensures fun _ -> True)\n= let Some post_hint = post_hint0 in\n let x = fresh g in\n assume (~ (x `Set.mem` freevars post_hint.post));\n let ctag =\n match post_hint.effect_annot with\n | EffectAnnotAtomic _ -> STT_Atomic\n | EffectAnnotGhost -> STT_Ghost\n | _ -> STT\n in\n let y_tm = tm_var {nm_index=y;nm_ppname=y_ppname} in\n let d = T_Return g ctag false u ty y_tm post_hint.post x ty_typing\n (RU.magic ()) // that null_var y is well typed at ty in g, we know since lookup g y == Some ty\n (RU.magic ()) // typing of (open post x) in (g, x) ... post_hint is well-typed, so should get\n in\n let t = wtag (Some ctag) (Tm_Return {expected_type=tm_unknown;insert_eq=false;term=y_tm}) in\n let c = comp_return ctag false u ty y_tm post_hint.post x in\n let d : st_typing g t c = d in\n assume (comp_u c == post_hint.u); // this u should follow from equality of t\n match c, post_hint.effect_annot with\n | C_STAtomic _ obs _, EffectAnnotAtomic { opens } ->\n assert (comp_inames c == tm_emp_inames);\n let pht = post_hint_typing g post_hint x in\n let validity = emp_inames_included g opens pht.effect_annot_typing in\n let d = T_Sub _ _ _ _ d (STS_AtomicInvs _ (st_comp_of_comp c) tm_emp_inames opens obs obs validity) in\n (| _, _, d |)\n | _ -> \n (| _, _, d |)\n\nlet match_comp_res_with_post_hint (#g:env) (#t:st_term) (#c:comp_st)\n (d:st_typing g t c)\n (post_hint:post_hint_opt g)\n : T.Tac (t':st_term &\n c':comp_st &\n st_typing g t' c') =\n\n match post_hint with\n | None -> (| t, c, d |)\n | Some { ret_ty } ->\n let cres = comp_res c in\n if eq_tm cres ret_ty\n then (| t, c, d |)\n else match Pulse.Checker.Pure.check_equiv g cres ret_ty with\n | None ->\n fail g (Some t.range)\n (Printf.sprintf \"Could not prove equiv for computed type %s and expected type %s\"\n (P.term_to_string cres)\n (P.term_to_string ret_ty))\n | Some tok ->\n let d_equiv\n : RT.equiv _ (elab_term cres) (elab_term ret_ty) =\n RT.Rel_eq_token _ _ _ (FStar.Squash.return_squash tok) in\n \n let c' = with_st_comp c {(st_comp_of_comp c) with res = ret_ty } in\n let (| cres_typing, cpre_typing, x, cpost_typing |) =\n st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness d)) in\n\n let d_stequiv : st_equiv g c c' =\n ST_VPropEquiv _ c c' _ cpre_typing cres_typing cpost_typing d_equiv (VE_Refl _ _) (VE_Refl _ _)\n in\n\n (| t, c', T_Equiv _ _ _ _ d d_stequiv |)\n\nPartial implementation:\nlet apply_checker_result_k\n (#g: env)\n (#ctxt: vprop)\n (#post_hint: post_hint_for_env g)\n (r: checker_result_t g ctxt (Some post_hint))\n (res_ppname: ppname)\n : T.Tac (st_typing_in_ctxt g ctxt (Some post_hint)) =\n \n"
},
{
"role": "assistant",
"content": "let (| y , g1 , (| u_ty , ty_y , d_ty_y |) , (| pre' , _ |) , k |) = r in\nlet (| u_ty_y , d_ty_y |) = Pulse.Checker.Pure.check_universe g1 ty_y in\nlet d:st_typing_in_ctxt g1 pre' (Some post_hint) =\n return_in_ctxt g1 y res_ppname u_ty_y ty_y pre' d_ty_y (Some post_hint)\nin\nk (Some post_hint) d"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n g: Pulse.Typing.Env.env ->\n e: Pulse.Syntax.Base.term ->\n eff: FStar.Stubs.TypeChecker.Core.tot_or_ghost ->\n t0: Pulse.Syntax.Base.term ->\n d: Pulse.Typing.typing g e eff t0 ->\n steps: Prims.list FStar.Pervasives.norm_step\n -> FStar.Tactics.Effect.Tac\n (Prims.dtuple2 Pulse.Syntax.Base.term (fun t' -> Pulse.Typing.typing g e eff t'))\nEffect: FStar.Tactics.Effect.Tac\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow 
ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n 
post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) 
== comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) =\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\n\nlet st_equiv_pre (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (pre:term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre)\n = if eq_tm pre (comp_pre c) then d\n else\n let c' = comp_with_pre c pre in\n let (| u_of, pre_typing, x, post_typing |) =\n Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv\n\n\n#push-options \"--z3rlimit_factor 4 --ifuel 2 --fuel 0\"\nlet k_elab_equiv_continuation (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt #ctxt1 #ctxt2:term)\n (k:continuation_elaborator g1 ctxt g2 ctxt1)\n (d:vprop_equiv g2 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt g2 ctxt2 =\n fun post_hint res ->\n let (| st, c, st_d |) = res in\n let st_d : st_typing g2 st c = st_d in\n assert (comp_pre c == ctxt2);\n let st_d' : st_typing g2 st (comp_with_pre c ctxt1) = st_equiv_pre st_d _ (VE_Sym _ _ _ d) in\n k post_hint (| st, _, st_d' |)\n#pop-options\n\nlet vprop_equiv_typing_fwd (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g ctxt p)\n : tot_typing g p tm_vprop \n = let fwd, _ = vprop_equiv_typing d in\n fwd ctxt_typing\n\n#push-options \"--z3rlimit_factor 4 --ifuel 1 --fuel 0\"\nlet k_elab_equiv_prefix\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt2 #ctxt:term)\n (k:continuation_elaborator g1 ctxt1 g2 ctxt)\n (d:vprop_equiv g1 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt2 g2 ctxt =\n fun post_hint res ->\n let framing_token : frame_for_req_in_ctxt g1 ctxt2 ctxt1 = \n let d = VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Trans _ _ _ _ (VE_Unit _ _) d) in\n (| tm_emp, emp_typing, d |)\n in\n let res = k post_hint res in\n let (| st, c, st_d |) = res in\n assert (comp_pre c == ctxt1);\n (| _, _, st_equiv_pre st_d _ d |)\n #pop-options\n\nlet k_elab_equiv\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt1' #ctxt2 #ctxt2':term) \n (k:continuation_elaborator g1 ctxt1 g2 ctxt2)\n (d1:vprop_equiv g1 ctxt1 ctxt1')\n (d2:vprop_equiv g2 ctxt2 ctxt2')\n : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n \n let k : continuation_elaborator g1 ctxt1 g2 ctxt2' =\n k_elab_equiv_continuation k d2 in\n let k : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n k_elab_equiv_prefix k d1 in\n k\n\n#push-options \"--query_stats --fuel 2 --ifuel 2 --split_queries no --z3rlimit_factor 20\"\nopen Pulse.PP\nlet continuation_elaborator_with_bind (#g:env) (ctxt:term)\n (#c1:comp{stateful_comp c1})\n (#e1:st_term)\n (e1_typing:st_typing g e1 c1)\n (ctxt_pre1_typing:tot_typing g (tm_star ctxt (comp_pre c1)) tm_vprop)\n (x:nvar { None? 
(lookup g (snd x)) })\n : T.Tac (continuation_elaborator\n g\n (tm_star ctxt (comp_pre c1))\n (push_binding g (snd x) (fst x) (comp_res c1))\n (tm_star (open_term (comp_post c1) (snd x)) ctxt)) =\n\n let pre1 = comp_pre c1 in\n let res1 = comp_res c1 in\n let post1 = comp_post c1 in\n let ctxt_typing = star_typing_inversion_l ctxt_pre1_typing in\n // let p_prop = Metatheory.pure_typing_inversion pure_typing in\n let v_eq = VE_Comm g ctxt pre1 in\n let framing_token : frame_for_req_in_ctxt g (tm_star ctxt pre1) pre1 = \n (| ctxt, ctxt_typing, VE_Comm g pre1 ctxt |)\n in\n let (| c1, e1_typing |) =\n apply_frame ctxt_pre1_typing e1_typing framing_token in\n let (| u_of_1, pre_typing, _, _ |) =\n Metatheory.(st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness e1_typing))) in\n let b = res1 in\n let ppname, x = x in\n let g' = push_binding g x ppname b in\n \n let post1_opened = open_term_nv post1 (v_as_nv x) in\n let k : continuation_elaborator g (tm_star ctxt pre1) g' (tm_star post1_opened ctxt) =\n fun post_hint res ->\n let (| e2, c2, e2_typing |) = res in\n assert (comp_post_matches_hint c2 post_hint);\n let e2_typing : st_typing g' e2 c2 = e2_typing in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term e2_closed x == e2);\n assert (comp_pre c1 == (tm_star ctxt pre1));\n assert (comp_post c1 == tm_star post1 ctxt);\n assert (comp_pre c2 == tm_star post1_opened ctxt);\n assert (open_term (comp_post c1) x == tm_star post1_opened (open_term ctxt x));\n // ctxt is well-typed, hence ln\n assume (open_term ctxt x == ctxt);\n assert (open_term (comp_post c1) x == comp_pre c2);\n // we closed e2 with x\n assume (~ (x `Set.mem` freevars_st e2_closed));\n if x `Set.mem` freevars (comp_post c2)\n then fail g' None \"Impossible: freevar clash when constructing continuation elaborator for bind, please file a bug-report\"\n else (\n let t_typing, post_typing =\n Pulse.Typing.Combinators.bind_res_and_post_typing g c2 x post_hint in\n let g = push_context g \"mk_bind\" e1.range in\n // info_doc g None\n // [prefix 4 1 (doc_of_string \"mk_bind e1 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e1));\n // prefix 4 1 (doc_of_string \"mk_bind c1 = \") (pp #comp c1);\n // prefix 4 1 (doc_of_string \"mk_bind e2 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e2));\n // prefix 4 1 (doc_of_string \"mk_bind c2 = \") (pp #comp c2)]\n // ;\n let (| e, c, e_typing |) =\n Pulse.Typing.Combinators.mk_bind\n g (tm_star ctxt pre1) \n e1 e2_closed c1 c2 (ppname, x) e1_typing\n u_of_1 \n e2_typing\n t_typing\n post_typing\n (Some? post_hint)\n in\n (| e, c, e_typing |)\n )\n in\n k\n#pop-options\n\nmodule LN = Pulse.Typing.LN\n#push-options \"--z3rlimit_factor 4 --fuel 1 --ifuel 1\"\n\n\nlet st_comp_typing_with_post_hint \n (#g:env) (#ctxt:_)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (post_hint:post_hint_opt g { Some? post_hint })\n (c:comp_st { comp_pre c == ctxt /\\ comp_post_matches_hint c post_hint })\n: st_comp_typing g (st_comp_of_comp c)\n= let st = st_comp_of_comp c in\n let Some ph = post_hint in\n let post_typing_src\n : tot_typing (push_binding ph.g ph.x ppname_default ph.ret_ty)\n (open_term ph.post ph.x) tm_vprop\n = ph.post_typing_src\n in\n let x = fresh g in\n assume (fresh_wrt x g (freevars ph.post));\n assume (None? 
(lookup g ph.x));\n let post_typing_src\n : tot_typing (push_binding ph.g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = if x = ph.x\n then post_typing_src\n else \n let open Pulse.Typing.Metatheory.Base in\n let tt :\n tot_typing\n (push_binding ph.g x ppname_default ph.ret_ty)\n (subst_term (open_term ph.post ph.x) (renaming ph.x x))\n (subst_term tm_vprop (renaming ph.x x)) =\n tot_typing_renaming1 ph.g ph.x ph.ret_ty (open_term ph.post ph.x) tm_vprop post_typing_src x\n in\n assert (subst_term tm_vprop (renaming ph.x x) == tm_vprop);\n assume (subst_term (open_term ph.post ph.x) (renaming ph.x x) ==\n open_term ph.post x);\n tt\n in\n let post_typing_src\n : tot_typing (push_binding g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = //weakening: TODO\n RU.magic ()\n in\n let ty_typing : universe_of ph.g st.res st.u = ph.ty_typing in\n let ty_typing : universe_of g st.res st.u =\n Pulse.Typing.Metatheory.tot_typing_weakening_standard ph.g ty_typing g\n in\n assert (st.res == ph.ret_ty);\n assert (st.post == ph.post);\n STC g st x ty_typing ctxt_typing post_typing_src\n\nlet continuation_elaborator_with_bind_fn (#g:env) (#ctxt:term)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#e1:st_term)\n (#c1:comp { C_Tot? c1 })\n (b:binder{b.binder_ty == comp_res c1})\n (e1_typing:st_typing g e1 c1)\n (x:nvar { None? (lookup g (snd x)) })\n: T.Tac (continuation_elaborator\n g ctxt\n (push_binding g (snd x) ppname_default (comp_res c1)) ctxt)\n= let t1 = comp_res c1 in\n assert ((push_binding g (snd x) (fst x) t1) `env_extends` g);\n fun post_hint (| e2, c2, d2 |) ->\n if None? post_hint then T.fail \"bind_fn: expects the post_hint to be set\";\n let ppname, x = x in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term (close_st_term e2 x) x == e2);\n let e = wr c2 (Tm_Bind {binder=b; head=e1; body=e2_closed}) in\n let (| u, c1_typing |) = Pulse.Typing.Metatheory.Base.st_typing_correctness_ctot e1_typing in\n let c2_typing : comp_typing g c2 (universe_of_comp c2) =\n match c2 with\n | C_ST st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_ST _ _ stc\n \n | C_STAtomic i obs st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n let i_typing = CP.core_check_term g i T.E_Total tm_inames in\n CT_STAtomic _ _ obs _ i_typing stc\n\n | C_STGhost st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_STGhost _ _ stc\n in\n let d : st_typing g e c2 =\n T_BindFn g e1 e2_closed c1 c2 b x e1_typing u c1_typing d2 c2_typing\n in\n (| e, c2, d |)\n\nlet rec check_equiv_emp (g:env) (vp:term)\n : option (vprop_equiv g vp tm_emp)\n = match vp.t with\n | Tm_Emp -> Some (VE_Refl _ _)\n | Tm_Star vp1 vp2 ->\n (match check_equiv_emp g vp1, check_equiv_emp g vp2 with\n | Some d1, Some d2 ->\n let d3 : vprop_equiv g (tm_star vp1 vp2) (tm_star tm_emp tm_emp)\n = VE_Ctxt _ _ _ _ _ d1 d2 in\n let d4 : vprop_equiv g (tm_star tm_emp tm_emp) tm_emp =\n VE_Unit _ _ in\n Some (VE_Trans _ _ _ _ d3 d4)\n | _, _ -> None)\n | _ -> None\n\n\n\nlet emp_inames_included (g:env) (i:term) (_:tot_typing g i tm_inames)\n: prop_validity g (tm_inames_subset tm_emp_inames i)\n= RU.magic()\n\nlet return_in_ctxt (g:env) (y:var) (y_ppname:ppname) (u:universe) (ty:term) (ctxt:vprop)\n (ty_typing:universe_of g ty u)\n (post_hint0:post_hint_opt g { Some? 
post_hint0 /\\ checker_res_matches_post_hint g post_hint0 y ty ctxt})\n: Pure (st_typing_in_ctxt g ctxt post_hint0)\n (requires lookup g y == Some ty)\n (ensures fun _ -> True)\n= let Some post_hint = post_hint0 in\n let x = fresh g in\n assume (~ (x `Set.mem` freevars post_hint.post));\n let ctag =\n match post_hint.effect_annot with\n | EffectAnnotAtomic _ -> STT_Atomic\n | EffectAnnotGhost -> STT_Ghost\n | _ -> STT\n in\n let y_tm = tm_var {nm_index=y;nm_ppname=y_ppname} in\n let d = T_Return g ctag false u ty y_tm post_hint.post x ty_typing\n (RU.magic ()) // that null_var y is well typed at ty in g, we know since lookup g y == Some ty\n (RU.magic ()) // typing of (open post x) in (g, x) ... post_hint is well-typed, so should get\n in\n let t = wtag (Some ctag) (Tm_Return {expected_type=tm_unknown;insert_eq=false;term=y_tm}) in\n let c = comp_return ctag false u ty y_tm post_hint.post x in\n let d : st_typing g t c = d in\n assume (comp_u c == post_hint.u); // this u should follow from equality of t\n match c, post_hint.effect_annot with\n | C_STAtomic _ obs _, EffectAnnotAtomic { opens } ->\n assert (comp_inames c == tm_emp_inames);\n let pht = post_hint_typing g post_hint x in\n let validity = emp_inames_included g opens pht.effect_annot_typing in\n let d = T_Sub _ _ _ _ d (STS_AtomicInvs _ (st_comp_of_comp c) tm_emp_inames opens obs obs validity) in\n (| _, _, d |)\n | _ -> \n (| _, _, d |)\n\nlet match_comp_res_with_post_hint (#g:env) (#t:st_term) (#c:comp_st)\n (d:st_typing g t c)\n (post_hint:post_hint_opt g)\n : T.Tac (t':st_term &\n c':comp_st &\n st_typing g t' c') =\n\n match post_hint with\n | None -> (| t, c, d |)\n | Some { ret_ty } ->\n let cres = comp_res c in\n if eq_tm cres ret_ty\n then (| t, c, d |)\n else match Pulse.Checker.Pure.check_equiv g cres ret_ty with\n | None ->\n fail g (Some t.range)\n (Printf.sprintf \"Could not prove equiv for computed type %s and expected type %s\"\n (P.term_to_string cres)\n (P.term_to_string ret_ty))\n | Some tok ->\n let d_equiv\n : RT.equiv _ (elab_term cres) (elab_term ret_ty) =\n RT.Rel_eq_token _ _ _ (FStar.Squash.return_squash tok) in\n \n let c' = with_st_comp c {(st_comp_of_comp c) with res = ret_ty } in\n let (| cres_typing, cpre_typing, x, cpost_typing |) =\n st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness d)) in\n\n let d_stequiv : st_equiv g c c' =\n ST_VPropEquiv _ c c' _ cpre_typing cres_typing cpost_typing d_equiv (VE_Refl _ _) (VE_Refl _ _)\n in\n\n (| t, c', T_Equiv _ _ _ _ d d_stequiv |)\n\nlet apply_checker_result_k (#g:env) (#ctxt:vprop) (#post_hint:post_hint_for_env g)\n (r:checker_result_t g ctxt (Some post_hint))\n (res_ppname:ppname)\n : T.Tac (st_typing_in_ctxt g ctxt (Some post_hint)) =\n\n // TODO: FIXME add to checker result type?\n let (| y, g1, (| u_ty, ty_y, d_ty_y |), (| pre', _ |), k |) = r in\n\n let (| u_ty_y, d_ty_y |) = Pulse.Checker.Pure.check_universe g1 ty_y in\n\n let d : st_typing_in_ctxt g1 pre' (Some post_hint) =\n return_in_ctxt g1 y res_ppname u_ty_y ty_y pre' d_ty_y (Some post_hint) in\n\n k (Some post_hint) d\n\n#push-options \"--z3rlimit_factor 4 --fuel 0 --ifuel 1 --query_stats\"\nlet checker_result_for_st_typing (#g:env) (#ctxt:vprop) (#post_hint:post_hint_opt g)\n (d:st_typing_in_ctxt g ctxt post_hint)\n (ppname:ppname)\n : T.Tac (checker_result_t g ctxt post_hint) =\n\n let (| t, c, d |) = d in\n \n let x = fresh g in\n\n let g' = push_binding g x ppname (comp_res c) in\n let ctxt' = open_term_nv (comp_post c) (ppname, x) in\n let k\n : 
continuation_elaborator\n g (tm_star tm_emp (comp_pre c))\n g' (tm_star ctxt' tm_emp) =\n continuation_elaborator_with_bind tm_emp d (RU.magic ()) (ppname, x) in\n let k\n : continuation_elaborator g (comp_pre c) g' ctxt' =\n k_elab_equiv k (RU.magic ()) (RU.magic ()) in\n\n let _ : squash (checker_res_matches_post_hint g post_hint x (comp_res c) ctxt') =\n match post_hint with\n | None -> ()\n | Some post_hint -> () in\n\n assert (g' `env_extends` g);\n\n let comp_res_typing, _, f =\n Metatheory.(st_comp_typing_inversion_cofinite (fst <| comp_typing_inversion (st_typing_correctness d))) in\n\n // RU.magic is the typing of comp_res in g'\n // weaken comp_res_typing\n\n assume (~ (x `Set.mem` freevars (comp_post c)));\n let tt : universe_of _ _ _ = RU.magic () in\n (| x, g', (| comp_u c, comp_res c, tt |), (| ctxt', f x |), k |)\n#pop-options\n\nmodule R = FStar.Reflection.V2\n\nlet readback_comp_res_as_comp (c:T.comp) : option comp =\n match c with\n | T.C_Total t -> (\n match readback_comp t with\n | None -> None\n | Some c -> Some c\n )\n | _ -> None\n\nlet rec is_stateful_arrow (g:env) (c:option comp) (args:list T.argv) (out:list T.argv)\n : T.Tac (option (list T.argv & T.argv))\n = let open R in\n match c with\n | None -> None\n | Some (C_ST _)\n | Some (C_STGhost _)\n | Some (C_STAtomic _ _ _) -> (\n match args, out with\n | [], hd::tl -> Some (List.rev tl, hd)\n | _ -> None //leftover or not enough args\n )\n\n | Some (C_Tot c_res) -> (\n if not (Tm_FStar? c_res.t)\n then None\n else (\n let Tm_FStar c_res = c_res.t in\n let ht = T.inspect c_res in\n match ht with\n | T.Tv_Arrow b c -> (\n match args with\n | [] -> ( //no more args; check that only implicits remain, ending in an stateful comp \n let bs, c = T.collect_arr_ln_bs c_res in\n if List.Tot.for_all (fun b -> R.Q_Implicit? (R.inspect_binder b).qual) bs\n then is_stateful_arrow g (readback_comp_res_as_comp (R.inspect_comp c)) [] out\n else None //too few args \n )\n\n | (arg, qual)::args' -> ( //check that this arg qual matches the binder and recurse accordingly\n match b.qual, qual with\n | T.Q_Meta _, T.Q_Implicit\n | T.Q_Implicit, T.Q_Implicit \n | T.Q_Explicit, T.Q_Explicit -> //consume this argument\n is_stateful_arrow g (readback_comp_res_as_comp c) args' ((arg, qual)::out)\n\n | T.Q_Meta _, T.Q_Explicit\n | T.Q_Implicit, T.Q_Explicit -> \n //don't consume this argument\n is_stateful_arrow g (readback_comp_res_as_comp c) args out\n\n | _ -> None //incompatible qualifiers; bail\n )\n )\n | _ ->\n let c_res' = RU.whnf_lax (elab_env g) c_res in\n let ht = T.inspect c_res' in\n if T.Tv_Arrow? 
ht\n then (\n assume (not_tv_unknown c_res');\n let c_res' = tm_fstar c_res' (T.range_of_term c_res') in\n is_stateful_arrow g (Some (C_Tot c_res')) args out\n )\n else None\n )\n )\n\nmodule RU = Pulse.RuntimeUtils \nlet is_stateful_application (g:env) (e:term) \n : T.Tac (option st_term)\n = match e.t with\n | Tm_FStar host_term -> (\n let head, args = T.collect_app_ln host_term in\n assume (not_tv_unknown head);\n match RU.lax_check_term_with_unknown_universes (elab_env g) head with\n | None -> None\n | Some ht -> \n assume (not_tv_unknown ht);\n let head_t = tm_fstar ht (T.range_of_term ht) in\n match is_stateful_arrow g (Some (C_Tot head_t)) args [] with \n | None -> None\n | Some (applied_args, (last_arg, aqual))->\n let head = T.mk_app head applied_args in\n assume (not_tv_unknown head);\n let head = tm_fstar head (T.range_of_term head) in\n assume (not_tv_unknown last_arg);\n let last_arg = tm_fstar last_arg (T.range_of_term last_arg) in\n let qual = \n match aqual with\n | T.Q_Implicit -> Some Implicit\n | _ -> None\n in\n let st_app = Tm_STApp { head; arg=last_arg; arg_qual=qual} in\n let st_app = { term = st_app; range=e.range; effect_tag=default_effect_hint } in\n Some st_app\n )\n | _ -> None\n\n\nlet apply_conversion\n (#g:env) (#e:term) (#eff:_) (#t0:term)\n (d:typing g e eff t0)\n (#t1:term)\n (eq:Ghost.erased (RT.related (elab_env g) (elab_term t0) RT.R_Eq (elab_term t1)))\n : typing g e eff t1\n = let d : RT.typing (elab_env g) (elab_term e) (eff, (elab_term t0)) = d._0 in\n let r : RT.related (elab_env g) (elab_term t0) RT.R_Eq (elab_term t1) = eq in\n let r = RT.Rel_equiv _ _ _ RT.R_Sub r in\n let s : RT.sub_comp (elab_env g) (eff, (elab_term t0)) (eff, elab_term t1) = \n RT.Relc_typ _ _ _ _ _ r\n in\n E (RT.T_Sub _ _ _ _ d s)\n\nPartial implementation:\nlet norm_typing\n (g: env)\n (e: term)\n (eff: _)\n (t0: term)\n (d: typing g e eff t0)\n (steps: list norm_step)\n : T.Tac (t': term & typing g e eff t') =\n \n"
},
{
"role": "assistant",
"content": "let t = elab_term t0 in\nlet u_t_typing:Ghost.erased (u: R.universe & RT.typing _ _ _) =\n Pulse.Typing.Metatheory.Base.typing_correctness d._0\nin\nlet (| t' , t'_typing , related_t_t' |) =\n Pulse.RuntimeUtils.norm_well_typed_term (dsnd u_t_typing) steps\nin\nmatch Pulse.Readback.readback_ty t' with\n| None -> T.fail \"Could not readback normalized type\"\n| Some t'' ->\n let d:typing g e eff t'' = apply_conversion d related_t_t' in\n (| t'', d |)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src: Vale.X64.Machine_s.nat64 -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nPartial implementation:\nlet eval_MovBe64 (src: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "if movbe_enabled then Some (reverse_bytes_nat64 src) else None"
}
] |
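The MovBe row above follows the same recipe with a CPU-feature guard: the evaluator returns `None` when the feature is unavailable. A hedged sketch of that pattern is shown below; `movbe_flag` and `rev_bytes64` are assumed stand-ins, not the Vale definitions `movbe_enabled` and `reverse_bytes_nat64`.

```fstar
(* Placeholders standing in for Vale's movbe_enabled / reverse_bytes_nat64. *)
assume val movbe_flag : bool
assume val rev_bytes64 : int -> int

let eval_movbe_sketch (src:int) : option int =
  if movbe_flag then Some (rev_bytes64 src) else None
```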
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: x: Prims.int -> y: Prims.int -> DM4F_layered5.ST Prims.int\nEffect: DM4F_layered5.ST\n\nContext:\n(*\n Copyright 2008-2018 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule DM4F_layered5\n\n(* Same as DM4F, but layered over a layered PURE without monotonicity *)\nopen ID5\nopen DM4F_Utils\n\nunfold\nlet pure_bind_wp (#a #b : Type) (w1 : ID5.wp a) (w2 : a -> ID5.wp b) : ID5.wp b =\n ID5.bind_wp w1 w2\n\n(* Simulating state effect in DM4F, hopefully doable by a tactic. *)\n\ntype post_t st a = a -> st -> Type0\n\ntype wp0 (st:Type u#0) (a:Type u#ua) : Type u#(max 1 ua) =\n st -> post_t st a -> Type0\n\nlet st_monotonic #st #a (w : wp0 st a) : Type0 =\n //forall s0 p1 p2. (forall r. p1 r ==> p2 r) ==> w s0 p1 ==> w s0 p2\n // ^ this version seems to be less SMT-friendly\n forall s0 p1 p2. (forall x s1. p1 x s1 ==> p2 x s1) ==> w s0 p1 ==> w s0 p2\n\ntype wp st a = w:(wp0 st a){st_monotonic w}\n\nopen FStar.Monotonic.Pure\n\ntype repr (a:Type u#ua) (st:Type0) (wp : wp u#ua st a) : Type u#(max 1 ua) =\n s0:st -> ID (a & st) (as_pure_wp (fun p -> wp s0 (curry p)))\n\nunfold\nlet return_wp (#a:Type) (#st:Type0) (x:a) : wp st a =\n fun s0 p -> p x s0\n\nlet return (a:Type) (x:a) (st:Type0) : repr a st (return_wp x) =\n fun s0 -> (x, s0)\n\nunfold\nlet bind_wp (#a:Type) (#b:Type) (#st:Type0)\n (w1 : wp st a) (w2 : a -> wp st b) : wp st b =\n fun s0 p -> w1 s0 (fun y s1 -> w2 y s1 p)\n\nlet bind (a:Type) (b:Type) (st:Type0)\n (wp_c : wp st a)\n (wp_f : a -> wp st b)\n (c : repr a st wp_c)\n (f : (x:a -> repr b st (wp_f x)))\n: repr b st (bind_wp wp_c wp_f)\n= fun s0 ->\n //let (y, s1) = c s0 in\n //f y s1\n // GM: argh! using the match above introduces noise in the VC, a true precondition\n // that becomes a pain since we don't have monotonicity nor even extensionality\n let r = c s0 in\n f (fst r) (snd r)\n\nlet ite_wp #a #st (b:bool) (w1 w2 : wp st a) : wp st a =\n fun s0 p -> (b ==> w1 s0 p) /\\ ((~b) ==> w2 s0 p)\n\nlet if_then_else\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (f : repr a st wpf)\n (g : repr a st wpg)\n (b : bool)\n : Type\n = repr a st (ite_wp b wpf wpg)\n\nlet stronger\n (#a:Type) (#st:Type0)\n (w1 w2 : wp st a)\n : Type0\n = forall s0 p. w1 s0 p ==> w2 s0 p\n\nlet subcomp\n (a:Type)\n (st:Type0)\n (wpf wpg : wp st a)\n (#[@@@ refine] u : squash (stronger wpg wpf))\n (f : repr a st wpf)\n : repr a st wpg\n = f\n\ntotal\nreifiable\nreflectable\neffect {\n ST (a:Type) ([@@@ effect_param] st:Type0) (_:wp st a)\n with {repr; return; bind; subcomp; if_then_else}\n}\n\nlet lift_id_st_wp #a #st (w : ID5.wp a) : wp st a =\n elim_pure_wp_monotonicity_forall ();\n fun s0 p -> w (fun x -> p x s0)\n\nlet lift_id_st a wp st (f : ID5.repr a wp)\n : repr a st (lift_id_st_wp wp)\n = fun s0 -> ID5.ID?.reflect (ID5.bind _ _ _ _ f (fun x -> ID5.return _ (x, s0)))\n\nsub_effect ID ~> ST = lift_id_st\n\nlet null #st #a : wp st a =\n fun s0 p -> forall x s1. 
p x s1\n\nlet get #st () : ST st st (fun s0 p -> p s0 s0) =\n ST?.reflect (fun s0 -> (s0, s0))\n\nlet put #st (s:st) : ST unit st (fun _ p -> p () s) =\n ST?.reflect (fun _ -> ((), s))\n\n// this now works!!!\nlet test () : ST int int null =\n let x = get () in\n put (x + x);\n get () + get ()\n\nlet addx (x:int) : ST unit int (fun s0 p -> p () (s0+x)) =\n let y = get () in\n put (x+y)\n\nPartial implementation:\nlet add_via_state (x y: int) : ST int int (fun s0 p -> p (x + y) s0) =\n \n"
},
{
"role": "assistant",
"content": "let o = get () in\nput x;\naddx y;\nlet r = get () in\nput o;\nr"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Vale.X64.Machine_s.nat64 * (Prims.bool * Prims.bool))\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : 
instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nPartial implementation:\nlet eval_Xor64 (dst src: nat64) : option (nat64 & (bool & bool)) =\n \n"
},
{
"role": "assistant",
"content": "Some (Vale.Def.Types_s.ixor dst src, (false, false))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n code: Vale.X64.Decls.va_code ->\n _win: Prims.bool ->\n out: Vale.Inline.X64.Fadd_inline.b64 ->\n f1: Vale.Inline.X64.Fadd_inline.b64 ->\n f2: Vale.Inline.X64.Fadd_inline.b64 ->\n va_s0: Vale.X64.Decls.va_state\n -> Prims.Ghost (Vale.X64.Decls.va_state * Vale.X64.Decls.va_fuel)\nEffect: Prims.Ghost\n\nContext:\nmodule Vale.Inline.X64.Fadd_inline\n\nopen FStar.Mul\nopen FStar.HyperStack.ST\nmodule HS = FStar.HyperStack\nmodule B = LowStar.Buffer\nmodule DV = LowStar.BufferView.Down\nopen Vale.Def.Types_s\n\nopen Vale.Interop.Base\nmodule IX64 = Vale.Interop.X64\nmodule VSig = Vale.AsLowStar.ValeSig\nmodule LSig = Vale.AsLowStar.LowStarSig\nmodule ME = Vale.X64.Memory\nmodule V = Vale.X64.Decls\nmodule IA = Vale.Interop.Assumptions\nmodule W = Vale.AsLowStar.Wrapper\nopen Vale.X64.MemoryAdapters\nmodule VS = Vale.X64.State\nmodule MS = Vale.X64.Machine_s\nmodule PR = Vale.X64.Print_Inline_s\n\nmodule FU = Vale.Curve25519.X64.FastUtil\nmodule FH = Vale.Curve25519.X64.FastHybrid\nmodule FW = Vale.Curve25519.X64.FastWide\n\nlet uint64 = UInt64.t\n\n(* A little utility to trigger normalization in types *)\nlet as_t (#a:Type) (x:normal a) : a = x\nlet as_normal_t (#a:Type) (x:a) : normal a = x\n\n[@__reduce__]\nlet b64 = buf_t TUInt64 TUInt64\n[@__reduce__]\nlet t64_mod = TD_Buffer TUInt64 TUInt64 default_bq\n[@__reduce__]\nlet t64_no_mod = TD_Buffer TUInt64 TUInt64 ({modified=false; strict_disjointness=false; taint=MS.Secret})\n[@__reduce__]\nlet tuint64 = TD_Base TUInt64\n\n[@__reduce__]\nlet dom: IX64.arity_ok 3 td =\n let y = [t64_mod; t64_no_mod; tuint64] in\n assert_norm (List.length y = 3);\n y\n\n(* Need to rearrange the order of arguments *)\n[@__reduce__]\nlet add1_pre : VSig.vale_pre dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state) ->\n FU.va_req_Fast_add1 c va_s0\n (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2)\n\n[@__reduce__]\nlet add1_post : VSig.vale_post dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state)\n (va_s1:V.va_state)\n (f:V.va_fuel) ->\n FU.va_ens_Fast_add1 c va_s0 (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2) va_s1 f\n\n#set-options \"--z3rlimit 50\"\n\nlet add1_regs_modified: MS.reg_64 -> bool = fun (r:MS.reg_64) ->\n let open MS in\n if r = rRax || r = rRdx || r = rR8 || r = rR9 || r = rR10 || r = rR11 then true\n else false\n\nlet add1_xmms_modified = fun _ -> false\n\n[@__reduce__]\nlet add1_lemma'\n (code:V.va_code)\n (_win:bool)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state)\n : Ghost (V.va_state & V.va_fuel)\n (requires\n add1_pre code out f1 f2 va_s0)\n (ensures (fun (va_s1, f) ->\n V.eval_code code va_s0 f va_s1 /\\\n VSig.vale_calling_conventions va_s0 va_s1 add1_regs_modified add1_xmms_modified /\\\n add1_post code out f1 f2 va_s0 va_s1 f /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f1) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer out) /\\\n ME.buffer_writeable (as_vale_buffer out) /\\\n ME.buffer_writeable (as_vale_buffer f1) /\\\n ME.modifies (ME.loc_union (ME.loc_buffer (as_vale_buffer out))\n ME.loc_none) (VS.vs_get_vale_heap va_s0) (VS.vs_get_vale_heap va_s1)\n )) =\n let va_s1, f = FU.va_lemma_Fast_add1 code va_s0 (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2) in\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 out;\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f1;\n (va_s1, 
f)\n\n(* Prove that add1_lemma' has the required type *)\nlet add1_lemma = as_t #(VSig.vale_sig add1_regs_modified add1_xmms_modified add1_pre add1_post) add1_lemma'\n\nlet code_add1 = FU.va_code_Fast_add1 ()\n\nlet of_reg (r:MS.reg_64) : option (IX64.reg_nat 3) = match r with\n | 5 -> Some 0 // rdi\n | 4 -> Some 1 // rsi\n | 3 -> Some 2 // rdx\n | _ -> None\n\nlet of_arg (i:IX64.reg_nat 3) : MS.reg_64 = match i with\n | 0 -> MS.rRdi\n | 1 -> MS.rRsi\n | 2 -> MS.rRdx\n\nlet arg_reg : IX64.arg_reg_relation 3 = IX64.Rel of_reg of_arg\n\n(* Here's the type expected for the add1 wrapper *)\n[@__reduce__]\nlet lowstar_add1_t =\n assert_norm (List.length dom + List.length ([]<:list arg) <= 3);\n IX64.as_lowstar_sig_t_weak\n 3\n arg_reg\n add1_regs_modified\n add1_xmms_modified\n code_add1\n dom\n []\n _\n _\n // The boolean here doesn't matter\n (W.mk_prediction code_add1 dom [] (add1_lemma code_add1 IA.win))\n\n(* And here's the add1 wrapper itself *)\nlet lowstar_add1 : lowstar_add1_t =\n assert_norm (List.length dom + List.length ([]<:list arg) <= 3);\n IX64.wrap_weak\n 3\n arg_reg\n add1_regs_modified\n add1_xmms_modified\n code_add1\n dom\n (W.mk_prediction code_add1 dom [] (add1_lemma code_add1 IA.win))\n\nlet lowstar_add1_normal_t : normal lowstar_add1_t\n = as_normal_t #lowstar_add1_t lowstar_add1\n\nopen Vale.AsLowStar.MemoryHelpers\n\nlet add_scalar out f1 f2\n = DV.length_eq (get_downview out);\n DV.length_eq (get_downview f1);\n let (x, _) = lowstar_add1_normal_t out f1 f2 () in\n x\n\nlet add1_comments : list string =\n [\"Computes the addition of four-element f1 with value in f2\"; \"and returns the carry (if any)\"]\n\nlet add1_names (n:nat) =\n match n with\n | 0 -> \"out\"\n | 1 -> \"f1\"\n | 2 -> \"f2\"\n | _ -> \"\"\n\nlet add1_code_inline () : FStar.All.ML int =\n PR.print_inline \"add_scalar\" 0 None (List.length dom) dom add1_names code_add1 of_arg add1_regs_modified add1_comments\n\n\n[@__reduce__]\nlet fadd_dom: IX64.arity_ok_stdcall td =\n let y = [t64_mod; t64_no_mod; t64_no_mod] in\n assert_norm (List.length y = 3);\n y\n\n(* Need to rearrange the order of arguments *)\n[@__reduce__]\nlet fadd_pre : VSig.vale_pre fadd_dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:b64)\n (va_s0:V.va_state) ->\n FH.va_req_Fadd c va_s0\n (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2)\n\n[@__reduce__]\nlet fadd_post : VSig.vale_post fadd_dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:b64)\n (va_s0:V.va_state)\n (va_s1:V.va_state)\n (f:V.va_fuel) ->\n FH.va_ens_Fadd c va_s0 (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2) va_s1 f\n\n#set-options \"--z3rlimit 50\"\n\nlet fadd_regs_modified: MS.reg_64 -> bool = fun (r:MS.reg_64) ->\n let open MS in\n if r = rRax || r = rRcx || r = rRdx || r = rR8 || r = rR9 || r = rR10 || r = rR11 then true\n else false\n\nlet fadd_xmms_modified = fun _ -> false\n\n[@__reduce__]\nlet fadd_lemma'\n (code:V.va_code)\n (_win:bool)\n (out:b64)\n (f1:b64)\n (f2:b64)\n (va_s0:V.va_state)\n : Ghost (V.va_state & V.va_fuel)\n (requires\n fadd_pre code out f1 f2 va_s0)\n (ensures (fun (va_s1, f) ->\n V.eval_code code va_s0 f va_s1 /\\\n VSig.vale_calling_conventions va_s0 va_s1 fadd_regs_modified fadd_xmms_modified /\\\n fadd_post code out f1 f2 va_s0 va_s1 f /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer out) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f1) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f2) /\\\n ME.buffer_writeable (as_vale_buffer out) 
/\\\n ME.buffer_writeable (as_vale_buffer f1) /\\\n ME.buffer_writeable (as_vale_buffer f2) /\\\n ME.modifies (ME.loc_union (ME.loc_buffer (as_vale_buffer out))\n ME.loc_none) (VS.vs_get_vale_heap va_s0) (VS.vs_get_vale_heap va_s1)\n )) =\n let va_s1, f = FH.va_lemma_Fadd code va_s0 (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2) in\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 out;\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f1;\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f2;\n (va_s1, f)\n\n(* Prove that add1_lemma' has the required type *)\nlet fadd_lemma = as_t #(VSig.vale_sig fadd_regs_modified fadd_xmms_modified fadd_pre fadd_post) fadd_lemma'\n\nlet code_Fadd = FH.va_code_Fadd ()\n\n(* Here's the type expected for the fadd wrapper *)\n[@__reduce__]\nlet lowstar_fadd_t =\n assert_norm (List.length fadd_dom + List.length ([]<:list arg) <= 3);\n IX64.as_lowstar_sig_t_weak\n 3\n arg_reg\n fadd_regs_modified\n fadd_xmms_modified\n code_Fadd\n fadd_dom\n []\n _\n _\n // The boolean here doesn't matter\n (W.mk_prediction code_Fadd fadd_dom [] (fadd_lemma code_Fadd IA.win))\n\n(* And here's the fadd wrapper itself *)\nlet lowstar_fadd : lowstar_fadd_t =\n assert_norm (List.length fadd_dom + List.length ([]<:list arg) <= 3);\n IX64.wrap_weak\n 3\n arg_reg\n fadd_regs_modified\n fadd_xmms_modified\n code_Fadd\n fadd_dom\n (W.mk_prediction code_Fadd fadd_dom [] (fadd_lemma code_Fadd IA.win))\n\nlet lowstar_fadd_normal_t : normal lowstar_fadd_t\n = as_normal_t #lowstar_fadd_t lowstar_fadd\n\nlet fadd out f1 f2\n = DV.length_eq (get_downview out);\n DV.length_eq (get_downview f1);\n DV.length_eq (get_downview f2);\n let (x, _) = lowstar_fadd_normal_t out f1 f2 () in\n ()\n\nlet fadd_comments : list string = [\"Computes the field addition of two field elements\"]\n\nlet fadd_names (n:nat) =\n match n with\n | 0 -> \"out\"\n | 1 -> \"f1\"\n | 2 -> \"f2\"\n | _ -> \"\"\n\nlet fadd_code_inline () : FStar.All.ML int =\n PR.print_inline \"fadd\" 0 None (List.length fadd_dom) fadd_dom fadd_names code_Fadd of_arg fadd_regs_modified fadd_comments\n\n[@__reduce__]\nlet fsub_dom: IX64.arity_ok_stdcall td =\n let y = [t64_mod; t64_no_mod; t64_no_mod] in\n assert_norm (List.length y = 3);\n y\n\n(* Need to rearrange the order of arguments *)\n[@__reduce__]\nlet fsub_pre : VSig.vale_pre fsub_dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:b64)\n (va_s0:V.va_state) ->\n FH.va_req_Fsub c va_s0\n (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2)\n\n[@__reduce__]\nlet fsub_post : VSig.vale_post fsub_dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:b64)\n (va_s0:V.va_state)\n (va_s1:V.va_state)\n (f:V.va_fuel) ->\n FH.va_ens_Fsub c va_s0 (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2) va_s1 f\n\n#set-options \"--z3rlimit 200\"\n\nlet fsub_regs_modified: MS.reg_64 -> bool = fun (r:MS.reg_64) ->\n let open MS in\n if r = rRax || r = rRcx || r = rR8 || r = rR9 || r = rR10 || r = rR11 then true\n else false\n\nlet fsub_xmms_modified = fun _ -> false\n\nPartial implementation:\nlet fsub_lemma' (code: V.va_code) (_win: bool) (out f1 f2: b64) (va_s0: V.va_state)\n : Ghost (V.va_state & V.va_fuel)\n (requires fsub_pre code out f1 f2 va_s0)\n (ensures\n (fun (va_s1, f) ->\n V.eval_code code va_s0 f va_s1 /\\\n VSig.vale_calling_conventions va_s0 va_s1 fsub_regs_modified fsub_xmms_modified /\\\n fsub_post code out f1 f2 va_s0 va_s1 f /\\\n ME.buffer_readable (VS.vs_get_vale_heap 
va_s1) (as_vale_buffer out) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f1) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f2) /\\\n ME.buffer_writeable (as_vale_buffer out) /\\ ME.buffer_writeable (as_vale_buffer f1) /\\\n ME.buffer_writeable (as_vale_buffer f2) /\\\n ME.modifies (ME.loc_union (ME.loc_buffer (as_vale_buffer out)) ME.loc_none)\n (VS.vs_get_vale_heap va_s0)\n (VS.vs_get_vale_heap va_s1))) =\n \n"
},
{
"role": "assistant",
"content": "let va_s1, f =\n FH.va_lemma_Fsub code va_s0 (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2)\nin\nVale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 out;\nVale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f1;\nVale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f2;\n(va_s1, f)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> amt: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] 
HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nPartial implementation:\nlet eval_Shr64 (dst amt: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nPartial implementation:\nlet eval_And64 (dst src: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "Some (iand dst src)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: x: FStar.Pervasives.Native.option a -> FStar.Pervasives.Native.option a\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst 
src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nPartial implementation:\nlet check_avx (#a: Type0) (x: option a) : option a =\n \n"
},
{
"role": "assistant",
"content": "if avx_enabled then x else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: x: FStar.Pervasives.Native.option a -> FStar.Pervasives.Native.option a\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst 
src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nPartial implementation:\nlet check_sse2 (#a: Type0) (x: option a) : option a =\n \n"
},
{
"role": "assistant",
"content": "if sse2_enabled then x else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n c: Pulse.Syntax.Base.comp_st ->\n pre_typing: Pulse.Typing.tot_typing g (Pulse.Syntax.Base.comp_pre c) Pulse.Syntax.Base.tm_vprop ->\n p:\n Pulse.Typing.post_hint_for_env g\n {Pulse.Typing.comp_post_matches_hint c (FStar.Pervasives.Native.Some p)}\n -> FStar.Tactics.Effect.Tac (Pulse.Typing.comp_typing_u g c)\nEffect: FStar.Tactics.Effect.Tac\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = 
admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nPartial implementation:\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p: post_hint_for_env g {comp_post_matches_hint c (Some p)})\n : T.Tac (comp_typing_u g c) =\n \n"
},
{
"role": "assistant",
"content": "let x = fresh g in\nif x `Set.mem` (freevars p.post)\nthen fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\nelse\n let post_typing = post_hint_typing g p x in\n intro_comp_typing g\n c\n pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing\n x\n post_typing.post_typing"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: rcx: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Vale.X64.Machine_s.nat64 * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] 
HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n\nPartial implementation:\nlet eval_Xgetbv (rcx: nat64) : option (nat64 & nat64) =\n \n"
},
{
"role": "assistant",
"content": "if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> amt: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] 
HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nPartial implementation:\nlet eval_Shl64 (dst amt: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: x: FStar.Pervasives.Native.option a -> FStar.Pervasives.Native.option a\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst 
src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nPartial implementation:\nlet check_sse4_1 (#a: Type0) (x: option a) : option a =\n \n"
},
{
"role": "assistant",
"content": "if sse4_1_enabled then x else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: rax: Vale.X64.Machine_s.nat64 -> rcx: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Vale.X64.Machine_s.nat64 *\n (Vale.X64.Machine_s.nat64 * (Vale.X64.Machine_s.nat64 * Vale.X64.Machine_s.nat64)))\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst 
src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nPartial implementation:\nlet eval_Cpuid (rax rcx: nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n \n"
},
{
"role": "assistant",
"content": "Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n g: Pulse.Typing.Env.env ->\n c: Pulse.Syntax.Base.comp_st ->\n pre_typing: Pulse.Typing.tot_typing g (Pulse.Syntax.Base.comp_pre c) Pulse.Syntax.Base.tm_vprop ->\n iname_typing: Pulse.Typing.effect_annot_typing g (Pulse.Syntax.Base.effect_annot_of_comp c) ->\n res_typing:\n Pulse.Typing.universe_of g (Pulse.Syntax.Base.comp_res c) (Pulse.Syntax.Base.comp_u c) ->\n x:\n Pulse.Syntax.Base.var\n {Pulse.Typing.fresh_wrt x g (Pulse.Syntax.Naming.freevars (Pulse.Syntax.Base.comp_post c))} ->\n post_typing:\n Pulse.Typing.tot_typing (Pulse.Typing.Env.push_binding g\n x\n Pulse.Syntax.Base.ppname_default\n (Pulse.Syntax.Base.comp_res c))\n (Pulse.Syntax.Naming.open_term (Pulse.Syntax.Base.comp_post c) x)\n Pulse.Syntax.Base.tm_vprop\n -> FStar.Tactics.Effect.Tac (Pulse.Typing.comp_typing g c (Pulse.Syntax.Base.universe_of_comp c))\nEffect: FStar.Tactics.Effect.Tac\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nPartial implementation:\nlet intro_comp_typing\n (g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (i_typing: effect_annot_typing g (effect_annot_of_comp c))\n (res_typing: universe_of g (comp_res c) (comp_u c))\n (x: var{fresh_wrt x g (freevars (comp_post c))})\n (post_typing:\n tot_typing (push_binding g x ppname_default (comp_res c))\n (open_term (comp_post c) x)\n tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c)) =\n \n"
},
{
"role": "assistant",
"content": "let intro_st_comp_typing\n (st:\n st_comp\n { comp_u c == st.u /\\ comp_pre c == st.pre /\\ comp_res c == st.res /\\\n comp_post c == st.post })\n : T.Tac (st_comp_typing g st) =\n STC g st x res_typing pre_typing post_typing\nin\nmatch c with\n| C_ST st ->\n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n| C_STAtomic i obs st ->\n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n| C_STGhost st ->\n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: x: FStar.Pervasives.Native.option a -> FStar.Pervasives.Native.option a\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst 
src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nPartial implementation:\nlet check_ssse3 (#a: Type0) (x: option a) : option a =\n \n"
},
{
"role": "assistant",
"content": "if ssse3_enabled then x else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.quad32 -> src: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nPartial implementation:\nlet eval_Pand (dst src: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Prims.bool * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nPartial implementation:\nlet eval_Add64 (dst src: nat64) : option (bool & nat64) =\n \n"
},
{
"role": "assistant",
"content": "let sum = dst + src in\nSome (sum >= pow2_64, sum % pow2_64)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src: Vale.X64.Machine_s.quad32 -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet 
eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nPartial implementation:\nlet eval_Movdqu (src: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (Some src)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nPartial implementation:\nlet eval_VPxor (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_avx (Some (quad32_xor src1 src2))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: out: Vale.Inline.X64.Fadd_inline.u256 -> f1: Vale.Inline.X64.Fadd_inline.u256 -> f2: FStar.UInt64.t\n -> FStar.HyperStack.ST.Stack (FStar.Ghost.erased FStar.UInt64.t)\nEffect: FStar.HyperStack.ST.Stack\n\nContext:\nmodule Vale.Inline.X64.Fadd_inline\n\nopen FStar.Mul\nopen FStar.HyperStack.ST\nmodule HS = FStar.HyperStack\nmodule B = LowStar.Buffer\nmodule DV = LowStar.BufferView.Down\nopen Vale.Def.Types_s\n\nopen Vale.Interop.Base\nmodule IX64 = Vale.Interop.X64\nmodule VSig = Vale.AsLowStar.ValeSig\nmodule LSig = Vale.AsLowStar.LowStarSig\nmodule ME = Vale.X64.Memory\nmodule V = Vale.X64.Decls\nmodule IA = Vale.Interop.Assumptions\nmodule W = Vale.AsLowStar.Wrapper\nopen Vale.X64.MemoryAdapters\nmodule VS = Vale.X64.State\nmodule MS = Vale.X64.Machine_s\nmodule PR = Vale.X64.Print_Inline_s\n\nmodule FU = Vale.Curve25519.X64.FastUtil\nmodule FH = Vale.Curve25519.X64.FastHybrid\nmodule FW = Vale.Curve25519.X64.FastWide\n\nlet uint64 = UInt64.t\n\n(* A little utility to trigger normalization in types *)\nlet as_t (#a:Type) (x:normal a) : a = x\nlet as_normal_t (#a:Type) (x:a) : normal a = x\n\n[@__reduce__]\nlet b64 = buf_t TUInt64 TUInt64\n[@__reduce__]\nlet t64_mod = TD_Buffer TUInt64 TUInt64 default_bq\n[@__reduce__]\nlet t64_no_mod = TD_Buffer TUInt64 TUInt64 ({modified=false; strict_disjointness=false; taint=MS.Secret})\n[@__reduce__]\nlet tuint64 = TD_Base TUInt64\n\n[@__reduce__]\nlet dom: IX64.arity_ok 3 td =\n let y = [t64_mod; t64_no_mod; tuint64] in\n assert_norm (List.length y = 3);\n y\n\n(* Need to rearrange the order of arguments *)\n[@__reduce__]\nlet add1_pre : VSig.vale_pre dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state) ->\n FU.va_req_Fast_add1 c va_s0\n (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2)\n\n[@__reduce__]\nlet add1_post : VSig.vale_post dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state)\n (va_s1:V.va_state)\n (f:V.va_fuel) ->\n FU.va_ens_Fast_add1 c va_s0 (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2) va_s1 f\n\n#set-options \"--z3rlimit 50\"\n\nlet add1_regs_modified: MS.reg_64 -> bool = fun (r:MS.reg_64) ->\n let open MS in\n if r = rRax || r = rRdx || r = rR8 || r = rR9 || r = rR10 || r = rR11 then true\n else false\n\nlet add1_xmms_modified = fun _ -> false\n\n[@__reduce__]\nlet add1_lemma'\n (code:V.va_code)\n (_win:bool)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state)\n : Ghost (V.va_state & V.va_fuel)\n (requires\n add1_pre code out f1 f2 va_s0)\n (ensures (fun (va_s1, f) ->\n V.eval_code code va_s0 f va_s1 /\\\n VSig.vale_calling_conventions va_s0 va_s1 add1_regs_modified add1_xmms_modified /\\\n add1_post code out f1 f2 va_s0 va_s1 f /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f1) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer out) /\\\n ME.buffer_writeable (as_vale_buffer out) /\\\n ME.buffer_writeable (as_vale_buffer f1) /\\\n ME.modifies (ME.loc_union (ME.loc_buffer (as_vale_buffer out))\n ME.loc_none) (VS.vs_get_vale_heap va_s0) (VS.vs_get_vale_heap va_s1)\n )) =\n let va_s1, f = FU.va_lemma_Fast_add1 code va_s0 (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2) in\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 out;\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f1;\n (va_s1, f)\n\n(* Prove that add1_lemma' has the required type *)\nlet add1_lemma = as_t #(VSig.vale_sig 
add1_regs_modified add1_xmms_modified add1_pre add1_post) add1_lemma'\n\nlet code_add1 = FU.va_code_Fast_add1 ()\n\nlet of_reg (r:MS.reg_64) : option (IX64.reg_nat 3) = match r with\n | 5 -> Some 0 // rdi\n | 4 -> Some 1 // rsi\n | 3 -> Some 2 // rdx\n | _ -> None\n\nlet of_arg (i:IX64.reg_nat 3) : MS.reg_64 = match i with\n | 0 -> MS.rRdi\n | 1 -> MS.rRsi\n | 2 -> MS.rRdx\n\nlet arg_reg : IX64.arg_reg_relation 3 = IX64.Rel of_reg of_arg\n\n(* Here's the type expected for the add1 wrapper *)\n[@__reduce__]\nlet lowstar_add1_t =\n assert_norm (List.length dom + List.length ([]<:list arg) <= 3);\n IX64.as_lowstar_sig_t_weak\n 3\n arg_reg\n add1_regs_modified\n add1_xmms_modified\n code_add1\n dom\n []\n _\n _\n // The boolean here doesn't matter\n (W.mk_prediction code_add1 dom [] (add1_lemma code_add1 IA.win))\n\n(* And here's the add1 wrapper itself *)\nlet lowstar_add1 : lowstar_add1_t =\n assert_norm (List.length dom + List.length ([]<:list arg) <= 3);\n IX64.wrap_weak\n 3\n arg_reg\n add1_regs_modified\n add1_xmms_modified\n code_add1\n dom\n (W.mk_prediction code_add1 dom [] (add1_lemma code_add1 IA.win))\n\nlet lowstar_add1_normal_t : normal lowstar_add1_t\n = as_normal_t #lowstar_add1_t lowstar_add1\n\nopen Vale.AsLowStar.MemoryHelpers\n\nPartial implementation:\nlet add_scalar out f1 f2 =\n \n"
},
{
"role": "assistant",
"content": "DV.length_eq (get_downview out);\nDV.length_eq (get_downview f1);\nlet x, _ = lowstar_add1_normal_t out f1 f2 () in\nx"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.quad32 -> src: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nPartial implementation:\nlet eval_Pxor (dst src: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (Some (quad32_xor dst src))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n code: Vale.X64.Decls.va_code ->\n _win: Prims.bool ->\n out: Vale.Inline.X64.Fadd_inline.b64 ->\n f1: Vale.Inline.X64.Fadd_inline.b64 ->\n f2: Vale.Inline.X64.Fadd_inline.b64 ->\n va_s0: Vale.X64.Decls.va_state\n -> Prims.Ghost (Vale.X64.Decls.va_state * Vale.X64.Decls.va_fuel)\nEffect: Prims.Ghost\n\nContext:\nmodule Vale.Inline.X64.Fadd_inline\n\nopen FStar.Mul\nopen FStar.HyperStack.ST\nmodule HS = FStar.HyperStack\nmodule B = LowStar.Buffer\nmodule DV = LowStar.BufferView.Down\nopen Vale.Def.Types_s\n\nopen Vale.Interop.Base\nmodule IX64 = Vale.Interop.X64\nmodule VSig = Vale.AsLowStar.ValeSig\nmodule LSig = Vale.AsLowStar.LowStarSig\nmodule ME = Vale.X64.Memory\nmodule V = Vale.X64.Decls\nmodule IA = Vale.Interop.Assumptions\nmodule W = Vale.AsLowStar.Wrapper\nopen Vale.X64.MemoryAdapters\nmodule VS = Vale.X64.State\nmodule MS = Vale.X64.Machine_s\nmodule PR = Vale.X64.Print_Inline_s\n\nmodule FU = Vale.Curve25519.X64.FastUtil\nmodule FH = Vale.Curve25519.X64.FastHybrid\nmodule FW = Vale.Curve25519.X64.FastWide\n\nlet uint64 = UInt64.t\n\n(* A little utility to trigger normalization in types *)\nlet as_t (#a:Type) (x:normal a) : a = x\nlet as_normal_t (#a:Type) (x:a) : normal a = x\n\n[@__reduce__]\nlet b64 = buf_t TUInt64 TUInt64\n[@__reduce__]\nlet t64_mod = TD_Buffer TUInt64 TUInt64 default_bq\n[@__reduce__]\nlet t64_no_mod = TD_Buffer TUInt64 TUInt64 ({modified=false; strict_disjointness=false; taint=MS.Secret})\n[@__reduce__]\nlet tuint64 = TD_Base TUInt64\n\n[@__reduce__]\nlet dom: IX64.arity_ok 3 td =\n let y = [t64_mod; t64_no_mod; tuint64] in\n assert_norm (List.length y = 3);\n y\n\n(* Need to rearrange the order of arguments *)\n[@__reduce__]\nlet add1_pre : VSig.vale_pre dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state) ->\n FU.va_req_Fast_add1 c va_s0\n (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2)\n\n[@__reduce__]\nlet add1_post : VSig.vale_post dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state)\n (va_s1:V.va_state)\n (f:V.va_fuel) ->\n FU.va_ens_Fast_add1 c va_s0 (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2) va_s1 f\n\n#set-options \"--z3rlimit 50\"\n\nlet add1_regs_modified: MS.reg_64 -> bool = fun (r:MS.reg_64) ->\n let open MS in\n if r = rRax || r = rRdx || r = rR8 || r = rR9 || r = rR10 || r = rR11 then true\n else false\n\nlet add1_xmms_modified = fun _ -> false\n\n[@__reduce__]\nlet add1_lemma'\n (code:V.va_code)\n (_win:bool)\n (out:b64)\n (f1:b64)\n (f2:uint64)\n (va_s0:V.va_state)\n : Ghost (V.va_state & V.va_fuel)\n (requires\n add1_pre code out f1 f2 va_s0)\n (ensures (fun (va_s1, f) ->\n V.eval_code code va_s0 f va_s1 /\\\n VSig.vale_calling_conventions va_s0 va_s1 add1_regs_modified add1_xmms_modified /\\\n add1_post code out f1 f2 va_s0 va_s1 f /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f1) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer out) /\\\n ME.buffer_writeable (as_vale_buffer out) /\\\n ME.buffer_writeable (as_vale_buffer f1) /\\\n ME.modifies (ME.loc_union (ME.loc_buffer (as_vale_buffer out))\n ME.loc_none) (VS.vs_get_vale_heap va_s0) (VS.vs_get_vale_heap va_s1)\n )) =\n let va_s1, f = FU.va_lemma_Fast_add1 code va_s0 (as_vale_buffer out) (as_vale_buffer f1) (UInt64.v f2) in\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 out;\n Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f1;\n (va_s1, 
f)\n\n(* Prove that add1_lemma' has the required type *)\nlet add1_lemma = as_t #(VSig.vale_sig add1_regs_modified add1_xmms_modified add1_pre add1_post) add1_lemma'\n\nlet code_add1 = FU.va_code_Fast_add1 ()\n\nlet of_reg (r:MS.reg_64) : option (IX64.reg_nat 3) = match r with\n | 5 -> Some 0 // rdi\n | 4 -> Some 1 // rsi\n | 3 -> Some 2 // rdx\n | _ -> None\n\nlet of_arg (i:IX64.reg_nat 3) : MS.reg_64 = match i with\n | 0 -> MS.rRdi\n | 1 -> MS.rRsi\n | 2 -> MS.rRdx\n\nlet arg_reg : IX64.arg_reg_relation 3 = IX64.Rel of_reg of_arg\n\n(* Here's the type expected for the add1 wrapper *)\n[@__reduce__]\nlet lowstar_add1_t =\n assert_norm (List.length dom + List.length ([]<:list arg) <= 3);\n IX64.as_lowstar_sig_t_weak\n 3\n arg_reg\n add1_regs_modified\n add1_xmms_modified\n code_add1\n dom\n []\n _\n _\n // The boolean here doesn't matter\n (W.mk_prediction code_add1 dom [] (add1_lemma code_add1 IA.win))\n\n(* And here's the add1 wrapper itself *)\nlet lowstar_add1 : lowstar_add1_t =\n assert_norm (List.length dom + List.length ([]<:list arg) <= 3);\n IX64.wrap_weak\n 3\n arg_reg\n add1_regs_modified\n add1_xmms_modified\n code_add1\n dom\n (W.mk_prediction code_add1 dom [] (add1_lemma code_add1 IA.win))\n\nlet lowstar_add1_normal_t : normal lowstar_add1_t\n = as_normal_t #lowstar_add1_t lowstar_add1\n\nopen Vale.AsLowStar.MemoryHelpers\n\nlet add_scalar out f1 f2\n = DV.length_eq (get_downview out);\n DV.length_eq (get_downview f1);\n let (x, _) = lowstar_add1_normal_t out f1 f2 () in\n x\n\nlet add1_comments : list string =\n [\"Computes the addition of four-element f1 with value in f2\"; \"and returns the carry (if any)\"]\n\nlet add1_names (n:nat) =\n match n with\n | 0 -> \"out\"\n | 1 -> \"f1\"\n | 2 -> \"f2\"\n | _ -> \"\"\n\nlet add1_code_inline () : FStar.All.ML int =\n PR.print_inline \"add_scalar\" 0 None (List.length dom) dom add1_names code_add1 of_arg add1_regs_modified add1_comments\n\n\n[@__reduce__]\nlet fadd_dom: IX64.arity_ok_stdcall td =\n let y = [t64_mod; t64_no_mod; t64_no_mod] in\n assert_norm (List.length y = 3);\n y\n\n(* Need to rearrange the order of arguments *)\n[@__reduce__]\nlet fadd_pre : VSig.vale_pre fadd_dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:b64)\n (va_s0:V.va_state) ->\n FH.va_req_Fadd c va_s0\n (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2)\n\n[@__reduce__]\nlet fadd_post : VSig.vale_post fadd_dom =\n fun (c:V.va_code)\n (out:b64)\n (f1:b64)\n (f2:b64)\n (va_s0:V.va_state)\n (va_s1:V.va_state)\n (f:V.va_fuel) ->\n FH.va_ens_Fadd c va_s0 (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2) va_s1 f\n\n#set-options \"--z3rlimit 50\"\n\nlet fadd_regs_modified: MS.reg_64 -> bool = fun (r:MS.reg_64) ->\n let open MS in\n if r = rRax || r = rRcx || r = rRdx || r = rR8 || r = rR9 || r = rR10 || r = rR11 then true\n else false\n\nlet fadd_xmms_modified = fun _ -> false\n\nPartial implementation:\nlet fadd_lemma' (code: V.va_code) (_win: bool) (out f1 f2: b64) (va_s0: V.va_state)\n : Ghost (V.va_state & V.va_fuel)\n (requires fadd_pre code out f1 f2 va_s0)\n (ensures\n (fun (va_s1, f) ->\n V.eval_code code va_s0 f va_s1 /\\\n VSig.vale_calling_conventions va_s0 va_s1 fadd_regs_modified fadd_xmms_modified /\\\n fadd_post code out f1 f2 va_s0 va_s1 f /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer out) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f1) /\\\n ME.buffer_readable (VS.vs_get_vale_heap va_s1) (as_vale_buffer f2) /\\\n ME.buffer_writeable (as_vale_buffer out) /\\ 
ME.buffer_writeable (as_vale_buffer f1) /\\\n ME.buffer_writeable (as_vale_buffer f2) /\\\n ME.modifies (ME.loc_union (ME.loc_buffer (as_vale_buffer out)) ME.loc_none)\n (VS.vs_get_vale_heap va_s0)\n (VS.vs_get_vale_heap va_s1))) =\n \n"
},
{
"role": "assistant",
"content": "let va_s1, f =\n FH.va_lemma_Fadd code va_s0 (as_vale_buffer out) (as_vale_buffer f1) (as_vale_buffer f2)\nin\nVale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 out;\nVale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f1;\nVale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt64 ME.TUInt64 f2;\n(va_s1, f)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: amt: Prims.int -> dst: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags 
eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nPartial implementation:\nlet eval_Pslld (amt: int) (dst: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n alg: Vale.AES.AES_common_s.algorithm ->\n inout_b: Vale.X64.Memory.buffer128 ->\n key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->\n round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n keys_b: Vale.X64.Memory.buffer128 ->\n hkeys_b: Vale.X64.Memory.buffer128 ->\n total_bytes: Prims.nat ->\n old_hash: Vale.X64.Decls.quad32 ->\n completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n h_LE: Vale.X64.Decls.quad32 ->\n va_s0: Vale.X64.Decls.va_state ->\n va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)\n -> Prims.Ghost ((Vale.X64.Decls.va_state * Vale.X64.Decls.va_fuel) * Prims.unit)\nEffect: Prims.Ghost\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" 
va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n 
\"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM 
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; 
va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . 
let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n\nPartial implementation:\nlet va_wpProof_Gcm_extra_bytes\n alg\n inout_b\n key\n round_keys\n keys_b\n hkeys_b\n total_bytes\n old_hash\n completed_quads\n h_LE\n va_s0\n va_k\n =\n \n"
},
{
"role": "assistant",
"content": "let va_sM, va_f0 =\n va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key round_keys keys_b\n hkeys_b total_bytes old_hash completed_quads h_LE\nin\nva_lemma_upd_update va_sM;\nassert (va_state_eq va_sM\n (va_update_flags va_sM\n (va_update_mem_heaplet 5\n va_sM\n (va_update_xmm 10\n va_sM\n (va_update_xmm 8\n va_sM\n (va_update_xmm 7\n va_sM\n (va_update_xmm 6\n va_sM\n (va_update_xmm 5\n va_sM\n (va_update_xmm 4\n va_sM\n (va_update_xmm 3\n va_sM\n (va_update_xmm 2\n va_sM\n (va_update_xmm 1\n va_sM\n (va_update_xmm 0\n va_sM\n (va_update_reg64 rR11\n va_sM\n (va_update_reg64 rRcx\n va_sM\n (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))\n )))))))));\nva_lemma_norm_mods ([\n va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem\n ])\n va_sM\n va_s0;\nlet va_g = () in\n(va_sM, va_f0, va_g)"
}
]
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n g: Pulse.Typing.Env.env ->\n e: Pulse.Syntax.Base.term ->\n eff: FStar.Stubs.TypeChecker.Core.tot_or_ghost ->\n t0: Pulse.Syntax.Base.term ->\n d: Pulse.Typing.typing g e eff t0 ->\n t1: Pulse.Syntax.Base.term ->\n d1: Pulse.Typing.tot_typing g t1 (Pulse.Syntax.Pure.tm_type u553) ->\n steps: Prims.list FStar.Pervasives.norm_step\n -> FStar.Tactics.Effect.Tac (FStar.Pervasives.Native.option (Pulse.Typing.typing g e eff t1))\nEffect: FStar.Tactics.Effect.Tac\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. 
%s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n 
ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing 
eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) == comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) =\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\n\nlet st_equiv_pre (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (pre:term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre)\n = if eq_tm pre (comp_pre c) then d\n else\n let c' = comp_with_pre c pre in\n let (| u_of, pre_typing, x, post_typing |) =\n Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv\n\n\n#push-options \"--z3rlimit_factor 4 --ifuel 2 --fuel 0\"\nlet k_elab_equiv_continuation (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt #ctxt1 #ctxt2:term)\n (k:continuation_elaborator g1 ctxt g2 ctxt1)\n (d:vprop_equiv g2 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt g2 ctxt2 =\n fun post_hint res ->\n let (| st, c, st_d |) = res in\n let st_d : st_typing g2 st c = st_d in\n assert (comp_pre c == ctxt2);\n let st_d' : st_typing g2 st (comp_with_pre c ctxt1) = st_equiv_pre st_d _ (VE_Sym _ _ _ d) in\n k post_hint (| st, _, st_d' |)\n#pop-options\n\nlet vprop_equiv_typing_fwd (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g ctxt p)\n : tot_typing g p tm_vprop \n = let fwd, _ = vprop_equiv_typing d in\n fwd 
ctxt_typing\n\n#push-options \"--z3rlimit_factor 4 --ifuel 1 --fuel 0\"\nlet k_elab_equiv_prefix\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt2 #ctxt:term)\n (k:continuation_elaborator g1 ctxt1 g2 ctxt)\n (d:vprop_equiv g1 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt2 g2 ctxt =\n fun post_hint res ->\n let framing_token : frame_for_req_in_ctxt g1 ctxt2 ctxt1 = \n let d = VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Trans _ _ _ _ (VE_Unit _ _) d) in\n (| tm_emp, emp_typing, d |)\n in\n let res = k post_hint res in\n let (| st, c, st_d |) = res in\n assert (comp_pre c == ctxt1);\n (| _, _, st_equiv_pre st_d _ d |)\n #pop-options\n\nlet k_elab_equiv\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt1' #ctxt2 #ctxt2':term) \n (k:continuation_elaborator g1 ctxt1 g2 ctxt2)\n (d1:vprop_equiv g1 ctxt1 ctxt1')\n (d2:vprop_equiv g2 ctxt2 ctxt2')\n : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n \n let k : continuation_elaborator g1 ctxt1 g2 ctxt2' =\n k_elab_equiv_continuation k d2 in\n let k : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n k_elab_equiv_prefix k d1 in\n k\n\n#push-options \"--query_stats --fuel 2 --ifuel 2 --split_queries no --z3rlimit_factor 20\"\nopen Pulse.PP\nlet continuation_elaborator_with_bind (#g:env) (ctxt:term)\n (#c1:comp{stateful_comp c1})\n (#e1:st_term)\n (e1_typing:st_typing g e1 c1)\n (ctxt_pre1_typing:tot_typing g (tm_star ctxt (comp_pre c1)) tm_vprop)\n (x:nvar { None? (lookup g (snd x)) })\n : T.Tac (continuation_elaborator\n g\n (tm_star ctxt (comp_pre c1))\n (push_binding g (snd x) (fst x) (comp_res c1))\n (tm_star (open_term (comp_post c1) (snd x)) ctxt)) =\n\n let pre1 = comp_pre c1 in\n let res1 = comp_res c1 in\n let post1 = comp_post c1 in\n let ctxt_typing = star_typing_inversion_l ctxt_pre1_typing in\n // let p_prop = Metatheory.pure_typing_inversion pure_typing in\n let v_eq = VE_Comm g ctxt pre1 in\n let framing_token : frame_for_req_in_ctxt g (tm_star ctxt pre1) pre1 = \n (| ctxt, ctxt_typing, VE_Comm g pre1 ctxt |)\n in\n let (| c1, e1_typing |) =\n apply_frame ctxt_pre1_typing e1_typing framing_token in\n let (| u_of_1, pre_typing, _, _ |) =\n Metatheory.(st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness e1_typing))) in\n let b = res1 in\n let ppname, x = x in\n let g' = push_binding g x ppname b in\n \n let post1_opened = open_term_nv post1 (v_as_nv x) in\n let k : continuation_elaborator g (tm_star ctxt pre1) g' (tm_star post1_opened ctxt) =\n fun post_hint res ->\n let (| e2, c2, e2_typing |) = res in\n assert (comp_post_matches_hint c2 post_hint);\n let e2_typing : st_typing g' e2 c2 = e2_typing in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term e2_closed x == e2);\n assert (comp_pre c1 == (tm_star ctxt pre1));\n assert (comp_post c1 == tm_star post1 ctxt);\n assert (comp_pre c2 == tm_star post1_opened ctxt);\n assert (open_term (comp_post c1) x == tm_star post1_opened (open_term ctxt x));\n // ctxt is well-typed, hence ln\n assume (open_term ctxt x == ctxt);\n assert (open_term (comp_post c1) x == comp_pre c2);\n // we closed e2 with x\n assume (~ (x `Set.mem` freevars_st e2_closed));\n if x `Set.mem` freevars (comp_post c2)\n then fail g' None \"Impossible: freevar clash when constructing continuation elaborator for bind, please file a bug-report\"\n else (\n let t_typing, post_typing =\n Pulse.Typing.Combinators.bind_res_and_post_typing g c2 x post_hint in\n let g = push_context g \"mk_bind\" e1.range in\n // info_doc g None\n // [prefix 4 1 (doc_of_string \"mk_bind e1 = \") 
(doc_of_string (Pulse.Syntax.Printer.st_term_to_string e1));\n // prefix 4 1 (doc_of_string \"mk_bind c1 = \") (pp #comp c1);\n // prefix 4 1 (doc_of_string \"mk_bind e2 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e2));\n // prefix 4 1 (doc_of_string \"mk_bind c2 = \") (pp #comp c2)]\n // ;\n let (| e, c, e_typing |) =\n Pulse.Typing.Combinators.mk_bind\n g (tm_star ctxt pre1) \n e1 e2_closed c1 c2 (ppname, x) e1_typing\n u_of_1 \n e2_typing\n t_typing\n post_typing\n (Some? post_hint)\n in\n (| e, c, e_typing |)\n )\n in\n k\n#pop-options\n\nmodule LN = Pulse.Typing.LN\n#push-options \"--z3rlimit_factor 4 --fuel 1 --ifuel 1\"\n\n\nlet st_comp_typing_with_post_hint \n (#g:env) (#ctxt:_)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (post_hint:post_hint_opt g { Some? post_hint })\n (c:comp_st { comp_pre c == ctxt /\\ comp_post_matches_hint c post_hint })\n: st_comp_typing g (st_comp_of_comp c)\n= let st = st_comp_of_comp c in\n let Some ph = post_hint in\n let post_typing_src\n : tot_typing (push_binding ph.g ph.x ppname_default ph.ret_ty)\n (open_term ph.post ph.x) tm_vprop\n = ph.post_typing_src\n in\n let x = fresh g in\n assume (fresh_wrt x g (freevars ph.post));\n assume (None? (lookup g ph.x));\n let post_typing_src\n : tot_typing (push_binding ph.g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = if x = ph.x\n then post_typing_src\n else \n let open Pulse.Typing.Metatheory.Base in\n let tt :\n tot_typing\n (push_binding ph.g x ppname_default ph.ret_ty)\n (subst_term (open_term ph.post ph.x) (renaming ph.x x))\n (subst_term tm_vprop (renaming ph.x x)) =\n tot_typing_renaming1 ph.g ph.x ph.ret_ty (open_term ph.post ph.x) tm_vprop post_typing_src x\n in\n assert (subst_term tm_vprop (renaming ph.x x) == tm_vprop);\n assume (subst_term (open_term ph.post ph.x) (renaming ph.x x) ==\n open_term ph.post x);\n tt\n in\n let post_typing_src\n : tot_typing (push_binding g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = //weakening: TODO\n RU.magic ()\n in\n let ty_typing : universe_of ph.g st.res st.u = ph.ty_typing in\n let ty_typing : universe_of g st.res st.u =\n Pulse.Typing.Metatheory.tot_typing_weakening_standard ph.g ty_typing g\n in\n assert (st.res == ph.ret_ty);\n assert (st.post == ph.post);\n STC g st x ty_typing ctxt_typing post_typing_src\n\nlet continuation_elaborator_with_bind_fn (#g:env) (#ctxt:term)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#e1:st_term)\n (#c1:comp { C_Tot? c1 })\n (b:binder{b.binder_ty == comp_res c1})\n (e1_typing:st_typing g e1 c1)\n (x:nvar { None? (lookup g (snd x)) })\n: T.Tac (continuation_elaborator\n g ctxt\n (push_binding g (snd x) ppname_default (comp_res c1)) ctxt)\n= let t1 = comp_res c1 in\n assert ((push_binding g (snd x) (fst x) t1) `env_extends` g);\n fun post_hint (| e2, c2, d2 |) ->\n if None? 
post_hint then T.fail \"bind_fn: expects the post_hint to be set\";\n let ppname, x = x in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term (close_st_term e2 x) x == e2);\n let e = wr c2 (Tm_Bind {binder=b; head=e1; body=e2_closed}) in\n let (| u, c1_typing |) = Pulse.Typing.Metatheory.Base.st_typing_correctness_ctot e1_typing in\n let c2_typing : comp_typing g c2 (universe_of_comp c2) =\n match c2 with\n | C_ST st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_ST _ _ stc\n \n | C_STAtomic i obs st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n let i_typing = CP.core_check_term g i T.E_Total tm_inames in\n CT_STAtomic _ _ obs _ i_typing stc\n\n | C_STGhost st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_STGhost _ _ stc\n in\n let d : st_typing g e c2 =\n T_BindFn g e1 e2_closed c1 c2 b x e1_typing u c1_typing d2 c2_typing\n in\n (| e, c2, d |)\n\nlet rec check_equiv_emp (g:env) (vp:term)\n : option (vprop_equiv g vp tm_emp)\n = match vp.t with\n | Tm_Emp -> Some (VE_Refl _ _)\n | Tm_Star vp1 vp2 ->\n (match check_equiv_emp g vp1, check_equiv_emp g vp2 with\n | Some d1, Some d2 ->\n let d3 : vprop_equiv g (tm_star vp1 vp2) (tm_star tm_emp tm_emp)\n = VE_Ctxt _ _ _ _ _ d1 d2 in\n let d4 : vprop_equiv g (tm_star tm_emp tm_emp) tm_emp =\n VE_Unit _ _ in\n Some (VE_Trans _ _ _ _ d3 d4)\n | _, _ -> None)\n | _ -> None\n\n\n\nlet emp_inames_included (g:env) (i:term) (_:tot_typing g i tm_inames)\n: prop_validity g (tm_inames_subset tm_emp_inames i)\n= RU.magic()\n\nlet return_in_ctxt (g:env) (y:var) (y_ppname:ppname) (u:universe) (ty:term) (ctxt:vprop)\n (ty_typing:universe_of g ty u)\n (post_hint0:post_hint_opt g { Some? post_hint0 /\\ checker_res_matches_post_hint g post_hint0 y ty ctxt})\n: Pure (st_typing_in_ctxt g ctxt post_hint0)\n (requires lookup g y == Some ty)\n (ensures fun _ -> True)\n= let Some post_hint = post_hint0 in\n let x = fresh g in\n assume (~ (x `Set.mem` freevars post_hint.post));\n let ctag =\n match post_hint.effect_annot with\n | EffectAnnotAtomic _ -> STT_Atomic\n | EffectAnnotGhost -> STT_Ghost\n | _ -> STT\n in\n let y_tm = tm_var {nm_index=y;nm_ppname=y_ppname} in\n let d = T_Return g ctag false u ty y_tm post_hint.post x ty_typing\n (RU.magic ()) // that null_var y is well typed at ty in g, we know since lookup g y == Some ty\n (RU.magic ()) // typing of (open post x) in (g, x) ... 
post_hint is well-typed, so should get\n in\n let t = wtag (Some ctag) (Tm_Return {expected_type=tm_unknown;insert_eq=false;term=y_tm}) in\n let c = comp_return ctag false u ty y_tm post_hint.post x in\n let d : st_typing g t c = d in\n assume (comp_u c == post_hint.u); // this u should follow from equality of t\n match c, post_hint.effect_annot with\n | C_STAtomic _ obs _, EffectAnnotAtomic { opens } ->\n assert (comp_inames c == tm_emp_inames);\n let pht = post_hint_typing g post_hint x in\n let validity = emp_inames_included g opens pht.effect_annot_typing in\n let d = T_Sub _ _ _ _ d (STS_AtomicInvs _ (st_comp_of_comp c) tm_emp_inames opens obs obs validity) in\n (| _, _, d |)\n | _ -> \n (| _, _, d |)\n\nlet match_comp_res_with_post_hint (#g:env) (#t:st_term) (#c:comp_st)\n (d:st_typing g t c)\n (post_hint:post_hint_opt g)\n : T.Tac (t':st_term &\n c':comp_st &\n st_typing g t' c') =\n\n match post_hint with\n | None -> (| t, c, d |)\n | Some { ret_ty } ->\n let cres = comp_res c in\n if eq_tm cres ret_ty\n then (| t, c, d |)\n else match Pulse.Checker.Pure.check_equiv g cres ret_ty with\n | None ->\n fail g (Some t.range)\n (Printf.sprintf \"Could not prove equiv for computed type %s and expected type %s\"\n (P.term_to_string cres)\n (P.term_to_string ret_ty))\n | Some tok ->\n let d_equiv\n : RT.equiv _ (elab_term cres) (elab_term ret_ty) =\n RT.Rel_eq_token _ _ _ (FStar.Squash.return_squash tok) in\n \n let c' = with_st_comp c {(st_comp_of_comp c) with res = ret_ty } in\n let (| cres_typing, cpre_typing, x, cpost_typing |) =\n st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness d)) in\n\n let d_stequiv : st_equiv g c c' =\n ST_VPropEquiv _ c c' _ cpre_typing cres_typing cpost_typing d_equiv (VE_Refl _ _) (VE_Refl _ _)\n in\n\n (| t, c', T_Equiv _ _ _ _ d d_stequiv |)\n\nlet apply_checker_result_k (#g:env) (#ctxt:vprop) (#post_hint:post_hint_for_env g)\n (r:checker_result_t g ctxt (Some post_hint))\n (res_ppname:ppname)\n : T.Tac (st_typing_in_ctxt g ctxt (Some post_hint)) =\n\n // TODO: FIXME add to checker result type?\n let (| y, g1, (| u_ty, ty_y, d_ty_y |), (| pre', _ |), k |) = r in\n\n let (| u_ty_y, d_ty_y |) = Pulse.Checker.Pure.check_universe g1 ty_y in\n\n let d : st_typing_in_ctxt g1 pre' (Some post_hint) =\n return_in_ctxt g1 y res_ppname u_ty_y ty_y pre' d_ty_y (Some post_hint) in\n\n k (Some post_hint) d\n\n#push-options \"--z3rlimit_factor 4 --fuel 0 --ifuel 1 --query_stats\"\nlet checker_result_for_st_typing (#g:env) (#ctxt:vprop) (#post_hint:post_hint_opt g)\n (d:st_typing_in_ctxt g ctxt post_hint)\n (ppname:ppname)\n : T.Tac (checker_result_t g ctxt post_hint) =\n\n let (| t, c, d |) = d in\n \n let x = fresh g in\n\n let g' = push_binding g x ppname (comp_res c) in\n let ctxt' = open_term_nv (comp_post c) (ppname, x) in\n let k\n : continuation_elaborator\n g (tm_star tm_emp (comp_pre c))\n g' (tm_star ctxt' tm_emp) =\n continuation_elaborator_with_bind tm_emp d (RU.magic ()) (ppname, x) in\n let k\n : continuation_elaborator g (comp_pre c) g' ctxt' =\n k_elab_equiv k (RU.magic ()) (RU.magic ()) in\n\n let _ : squash (checker_res_matches_post_hint g post_hint x (comp_res c) ctxt') =\n match post_hint with\n | None -> ()\n | Some post_hint -> () in\n\n assert (g' `env_extends` g);\n\n let comp_res_typing, _, f =\n Metatheory.(st_comp_typing_inversion_cofinite (fst <| comp_typing_inversion (st_typing_correctness d))) in\n\n // RU.magic is the typing of comp_res in g'\n // weaken comp_res_typing\n\n assume (~ (x `Set.mem` freevars 
(comp_post c)));\n let tt : universe_of _ _ _ = RU.magic () in\n (| x, g', (| comp_u c, comp_res c, tt |), (| ctxt', f x |), k |)\n#pop-options\n\nmodule R = FStar.Reflection.V2\n\nlet readback_comp_res_as_comp (c:T.comp) : option comp =\n match c with\n | T.C_Total t -> (\n match readback_comp t with\n | None -> None\n | Some c -> Some c\n )\n | _ -> None\n\nlet rec is_stateful_arrow (g:env) (c:option comp) (args:list T.argv) (out:list T.argv)\n : T.Tac (option (list T.argv & T.argv))\n = let open R in\n match c with\n | None -> None\n | Some (C_ST _)\n | Some (C_STGhost _)\n | Some (C_STAtomic _ _ _) -> (\n match args, out with\n | [], hd::tl -> Some (List.rev tl, hd)\n | _ -> None //leftover or not enough args\n )\n\n | Some (C_Tot c_res) -> (\n if not (Tm_FStar? c_res.t)\n then None\n else (\n let Tm_FStar c_res = c_res.t in\n let ht = T.inspect c_res in\n match ht with\n | T.Tv_Arrow b c -> (\n match args with\n | [] -> ( //no more args; check that only implicits remain, ending in an stateful comp \n let bs, c = T.collect_arr_ln_bs c_res in\n if List.Tot.for_all (fun b -> R.Q_Implicit? (R.inspect_binder b).qual) bs\n then is_stateful_arrow g (readback_comp_res_as_comp (R.inspect_comp c)) [] out\n else None //too few args \n )\n\n | (arg, qual)::args' -> ( //check that this arg qual matches the binder and recurse accordingly\n match b.qual, qual with\n | T.Q_Meta _, T.Q_Implicit\n | T.Q_Implicit, T.Q_Implicit \n | T.Q_Explicit, T.Q_Explicit -> //consume this argument\n is_stateful_arrow g (readback_comp_res_as_comp c) args' ((arg, qual)::out)\n\n | T.Q_Meta _, T.Q_Explicit\n | T.Q_Implicit, T.Q_Explicit -> \n //don't consume this argument\n is_stateful_arrow g (readback_comp_res_as_comp c) args out\n\n | _ -> None //incompatible qualifiers; bail\n )\n )\n | _ ->\n let c_res' = RU.whnf_lax (elab_env g) c_res in\n let ht = T.inspect c_res' in\n if T.Tv_Arrow? 
ht\n then (\n assume (not_tv_unknown c_res');\n let c_res' = tm_fstar c_res' (T.range_of_term c_res') in\n is_stateful_arrow g (Some (C_Tot c_res')) args out\n )\n else None\n )\n )\n\nmodule RU = Pulse.RuntimeUtils \nlet is_stateful_application (g:env) (e:term) \n : T.Tac (option st_term)\n = match e.t with\n | Tm_FStar host_term -> (\n let head, args = T.collect_app_ln host_term in\n assume (not_tv_unknown head);\n match RU.lax_check_term_with_unknown_universes (elab_env g) head with\n | None -> None\n | Some ht -> \n assume (not_tv_unknown ht);\n let head_t = tm_fstar ht (T.range_of_term ht) in\n match is_stateful_arrow g (Some (C_Tot head_t)) args [] with \n | None -> None\n | Some (applied_args, (last_arg, aqual))->\n let head = T.mk_app head applied_args in\n assume (not_tv_unknown head);\n let head = tm_fstar head (T.range_of_term head) in\n assume (not_tv_unknown last_arg);\n let last_arg = tm_fstar last_arg (T.range_of_term last_arg) in\n let qual = \n match aqual with\n | T.Q_Implicit -> Some Implicit\n | _ -> None\n in\n let st_app = Tm_STApp { head; arg=last_arg; arg_qual=qual} in\n let st_app = { term = st_app; range=e.range; effect_tag=default_effect_hint } in\n Some st_app\n )\n | _ -> None\n\n\nlet apply_conversion\n (#g:env) (#e:term) (#eff:_) (#t0:term)\n (d:typing g e eff t0)\n (#t1:term)\n (eq:Ghost.erased (RT.related (elab_env g) (elab_term t0) RT.R_Eq (elab_term t1)))\n : typing g e eff t1\n = let d : RT.typing (elab_env g) (elab_term e) (eff, (elab_term t0)) = d._0 in\n let r : RT.related (elab_env g) (elab_term t0) RT.R_Eq (elab_term t1) = eq in\n let r = RT.Rel_equiv _ _ _ RT.R_Sub r in\n let s : RT.sub_comp (elab_env g) (eff, (elab_term t0)) (eff, elab_term t1) = \n RT.Relc_typ _ _ _ _ _ r\n in\n E (RT.T_Sub _ _ _ _ d s)\n \nlet norm_typing\n (g:env) (e:term) (eff:_) (t0:term)\n (d:typing g e eff t0)\n (steps:list norm_step)\n : T.Tac (t':term & typing g e eff t')\n = let t = elab_term t0 in\n let u_t_typing : Ghost.erased (u:R.universe & RT.typing _ _ _) = \n Pulse.Typing.Metatheory.Base.typing_correctness d._0\n in\n let (| t', t'_typing, related_t_t' |) =\n Pulse.RuntimeUtils.norm_well_typed_term (dsnd u_t_typing) steps\n in\n match Pulse.Readback.readback_ty t' with\n | None -> T.fail \"Could not readback normalized type\"\n | Some t'' ->\n let d : typing g e eff t'' = apply_conversion d related_t_t' in\n (| t'', d |)\n\nPartial implementation:\nlet norm_typing_inverse\n (g: env)\n (e: term)\n (eff: _)\n (t0: term)\n (d: typing g e eff t0)\n (t1: term)\n (#u: _)\n (d1: tot_typing g t1 (tm_type u))\n (steps: list norm_step)\n : T.Tac (option (typing g e eff t1)) =\n \n"
},
{
"role": "assistant",
"content": "let (| t1' , t1'_typing , related_t1_t1' |) =\n let d1 = Ghost.hide d1._0 in\n Pulse.RuntimeUtils.norm_well_typed_term d1 steps\nin\nmatch Pulse.Readback.readback_ty t1' with\n| Some t1_p ->\n if TermEq.term_eq (elab_term t0) t1'\n then\n (let related_t1'_t1 = Ghost.hide (RT.Rel_sym _ _ _ related_t1_t1') in\n Some (apply_conversion d related_t1'_t1))\n else None\n| _ -> None"
}
]
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nPartial implementation:\nlet eval_VPaddd (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_avx (eval_Paddd_raw src1 src2)"
}
]
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: amount: Vale.Def.Types_s.nat8 -> src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval 
ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq 
(amt:int) (dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nPartial implementation:\nlet eval_Palignr_raw (amount: nat8) (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "if amount = 4\nthen Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\nelse if amount = 8 then Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1) else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nPartial implementation:\nlet eval_Paddd (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (eval_Paddd_raw src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n k: Pulse.Checker.Base.continuation_elaborator g1 ctxt1 g2 ctxt ->\n d: Pulse.Typing.vprop_equiv g1 ctxt1 ctxt2\n -> Pulse.Checker.Base.continuation_elaborator g1 ctxt2 g2 ctxt\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | 
EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n 
(#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) == comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = 
vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) =\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\n\nlet st_equiv_pre (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (pre:term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre)\n = if eq_tm pre (comp_pre c) then d\n else\n let c' = comp_with_pre c pre in\n let (| u_of, pre_typing, x, post_typing |) =\n Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv\n\n\n#push-options \"--z3rlimit_factor 4 --ifuel 2 --fuel 0\"\nlet k_elab_equiv_continuation (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt #ctxt1 #ctxt2:term)\n (k:continuation_elaborator g1 ctxt g2 ctxt1)\n (d:vprop_equiv g2 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt g2 ctxt2 =\n fun post_hint res ->\n let (| st, c, st_d |) = res in\n let st_d : st_typing g2 st c = st_d in\n assert (comp_pre c == ctxt2);\n let st_d' : st_typing g2 st (comp_with_pre c ctxt1) = st_equiv_pre st_d _ (VE_Sym _ _ _ d) in\n k post_hint (| st, _, st_d' |)\n#pop-options\n\nlet vprop_equiv_typing_fwd (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g ctxt p)\n : tot_typing g p tm_vprop \n = let fwd, _ = vprop_equiv_typing d in\n fwd ctxt_typing\n\nPartial implementation:\nlet k_elab_equiv_prefix\n (#g1: env)\n (#g2: env{g2 `env_extends` g1})\n (#ctxt1 #ctxt2 #ctxt: term)\n (k: continuation_elaborator g1 ctxt1 g2 ctxt)\n (d: vprop_equiv g1 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt2 g2 ctxt =\n \n"
},
{
"role": "assistant",
"content": "fun post_hint res ->\n let framing_token:frame_for_req_in_ctxt g1 ctxt2 ctxt1 =\n let d = VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Trans _ _ _ _ (VE_Unit _ _) d) in\n (| tm_emp, emp_typing, d |)\n in\n let res = k post_hint res in\n let (| st , c , st_d |) = res in\n assert (comp_pre c == ctxt1);\n (| _, _, st_equiv_pre st_d _ d |)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n alg: Vale.AES.AES_common_s.algorithm ->\n in_b: Vale.X64.Memory.buffer128 ->\n out_b: Vale.X64.Memory.buffer128 ->\n key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->\n round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n keys_b: Vale.X64.Memory.buffer128 ->\n hkeys_b: Vale.X64.Memory.buffer128 ->\n h_LE: Vale.X64.Decls.quad32\n -> Vale.X64.QuickCode.va_quickCode Prims.unit\n (Vale.AES.X64.GCMdecryptOpt.va_code_Gcm_blocks128 alg)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) 
(hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf 
*****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet 
va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) 
(inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . 
let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) 
(total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n (va_QProc (va_code_Gcm_extra_bytes alg) ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE) (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE))\n//--\n//-- Gcm_blocks128\n\nval va_code_Gcm_blocks128 : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks128 alg =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_CCons\n (va_code_Ghash_buffer ()) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRbx)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR12)) (va_CCons (va_code_Gctr_blocks128 alg) (va_CNil ())))))))))\n\nval va_codegen_success_Gcm_blocks128 : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks128 alg =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Ghash_buffer ()) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_pbool_and\n (va_codegen_success_Gctr_blocks128 alg) (va_ttrue ()))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks128 (va_mods:va_mods_t) (alg:algorithm) (in_b:buffer128) (out_b:buffer128)\n (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 274 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 275 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 276 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 277 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_buffer hkeys_b in_b h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_old_s))) (va_QSeq va_range1\n \"***** 
PRECONDITION NOT MET AT line 278 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 279 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 280 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_QEmpty (()))))))))))\n\n\nval va_lemma_Gcm_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->\n out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks128 alg) va_s0 /\\ va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 
(va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) /\\ va_state_eq va_sM (va_update_flags va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 10 va_sM (va_update_xmm 11 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR11 va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks128 va_b0 va_s0 alg in_b out_b key round_keys keys_b hkeys_b h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks128 va_mods alg in_b out_b key round_keys keys_b hkeys_b h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks128 alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 210 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (label va_range1\n \"***** POSTCONDITION NOT MET AT line 255 column 53 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 261 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 262 column 45 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx\n va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 265 column 93 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) out_b)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 267 column 131 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 
> 0 ==> l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n FStar.Seq.Base.length #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) (va_s0:va_state)\n (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret) /\\ (forall (va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_rdi:nat64) (va_x_r11:nat64)\n (va_x_r10:nat64) (va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32)\n (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . 
let va_sM = va_upd_flags va_x_efl\n (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11\n (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5\n (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1\n (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64\n rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbx\n va_x_rbx (va_upd_mem va_x_mem va_s0))))))))))))))))))) in va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq\n nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32 ->\n va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b\n h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks128 alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks128 (va_code_Gcm_blocks128 alg) va_s0 alg in_b out_b key\n round_keys keys_b hkeys_b h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6\n va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2\n va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR12 va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM (va_update_ok va_sM (va_update_mem\n va_sM va_s0))))))))))))))))))))));\n va_lemma_norm_mods 
([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\nPartial implementation:\nlet va_quick_Gcm_blocks128\n (alg: algorithm)\n (in_b out_b: buffer128)\n (key: (seq nat32))\n (round_keys: (seq quad32))\n (keys_b hkeys_b: buffer128)\n (h_LE: quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n \n"
},
{
"role": "assistant",
"content": "(va_QProc (va_code_Gcm_blocks128 alg)\n ([\n va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7;\n va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;\n va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem\n ])\n (va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE)\n (va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n d: Pulse.Typing.st_typing g t c ->\n post:\n Pulse.Syntax.Base.term\n { FStar.Set.subset (Pulse.Syntax.Naming.freevars post)\n (Pulse.Syntax.Naming.freevars (Pulse.Syntax.Base.comp_post c)) } ->\n veq:\n (\n x:\n Pulse.Syntax.Base.var\n { Pulse.Typing.fresh_wrt x\n g\n (Pulse.Syntax.Naming.freevars (Pulse.Syntax.Base.comp_post c)) }\n -> Pulse.Typing.vprop_equiv (Pulse.Typing.Env.push_binding g\n x\n Pulse.Syntax.Base.ppname_default\n (Pulse.Syntax.Base.comp_res c))\n (Pulse.Syntax.Naming.open_term (Pulse.Syntax.Base.comp_post c) x)\n (Pulse.Syntax.Naming.open_term post x))\n -> Pulse.Typing.st_typing g t (Pulse.Checker.Base.comp_st_with_post c post)\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. 
%s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n 
ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing 
eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nPartial implementation:\nlet st_equiv_post\n (#g: env)\n (#t: st_term)\n (#c: comp_st)\n (d: st_typing g t c)\n (post: term{(freevars post) `Set.subset` (freevars (comp_post c))})\n (veq:\n (x: var{fresh_wrt x g (freevars (comp_post c))}\n -> vprop_equiv (push_binding g x ppname_default (comp_res c))\n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post) =\n \n"
},
{
"role": "assistant",
"content": "if eq_tm post (comp_post c)\nthen d\nelse\n let c' = comp_st_with_post c post in\n let (| u_of , pre_typing , x , post_typing |) =\n let open Metatheory in\n st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))\n in\n let veq = veq x in\n let st_equiv:st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: amt: Prims.int -> dst: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags 
eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nPartial implementation:\nlet eval_Psrld (amt: int) (dst: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: old_carry: Prims.bool -> dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Prims.bool * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nPartial implementation:\nlet eval_Sbb64 (old_carry: bool) (dst src: nat64) : option (bool & nat64) =\n \n"
},
{
"role": "assistant",
"content": "let diff = dst - (src + (if old_carry then 1 else 0)) in\nSome (diff < 0, diff % pow2_64)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n alg: Vale.AES.AES_common_s.algorithm ->\n inout_b: Vale.X64.Memory.buffer128 ->\n key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->\n round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n keys_b: Vale.X64.Memory.buffer128 ->\n hkeys_b: Vale.X64.Memory.buffer128 ->\n total_bytes: Prims.nat ->\n old_hash: Vale.X64.Decls.quad32 ->\n completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n h_LE: Vale.X64.Decls.quad32 ->\n va_s0: Vale.X64.Decls.va_state ->\n va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)\n -> Type0\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) 
(alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file 
/home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM 
(va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) 
va_sM va_s0;\n (va_sM, va_fM)\n\nPartial implementation:\nlet va_wp_Gcm_extra_bytes\n (alg: algorithm)\n (inout_b: buffer128)\n (key: (seq nat32))\n (round_keys: (seq quad32))\n (keys_b hkeys_b: buffer128)\n (total_bytes: nat)\n (old_hash: quad32)\n (completed_quads: (seq quad32))\n (h_LE: quad32)\n (va_s0: va_state)\n (va_k: (va_state -> unit -> Type0))\n : Type0 =\n \n"
},
{
"role": "assistant",
"content": "(va_get_ok va_s0 /\\\n (let len:(va_int_range 1 1) = 1 in\n sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b inout_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0)\n (va_get_reg64 rRax va_s0)\n inout_b\n len\n (va_get_mem_layout va_s0)\n Secret /\\ len == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b /\\\n va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n aes_reqs alg\n key\n round_keys\n keys_b\n (va_get_reg64 rR8 va_s0)\n (va_get_mem_heaplet 0 va_s0)\n (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)\n (va_get_reg64 rR9 va_s0 - 32)\n hkeys_b\n 8\n (va_get_mem_layout va_s0)\n Secret /\\\n va_get_xmm 8 va_s0 ==\n Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE\n old_hash\n completed_quads) /\\\n FStar.Seq.Base.length #quad32 completed_quads == total_bytes `op_Division` 16 /\\\n total_bytes < 16 `op_Multiply` (FStar.Seq.Base.length #quad32 completed_quads) + 16 /\\\n va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\\n (0 < total_bytes /\\\n total_bytes < 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes)) /\\\n 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\\n (forall (va_x_mem: vale_heap) (va_x_rcx: nat64) (va_x_r11: nat64) (va_x_xmm0: quad32)\n (va_x_xmm1: quad32) (va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32)\n (va_x_xmm5: quad32) (va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32)\n (va_x_xmm10: quad32) (va_x_heap5: vale_heap) (va_x_efl: Vale.X64.Flags.t).\n let va_sM =\n va_upd_flags va_x_efl\n (va_upd_mem_heaplet 5\n va_x_heap5\n (va_upd_xmm 10\n va_x_xmm10\n (va_upd_xmm 8\n va_x_xmm8\n (va_upd_xmm 7\n va_x_xmm7\n (va_upd_xmm 6\n va_x_xmm6\n (va_upd_xmm 5\n va_x_xmm5\n (va_upd_xmm 4\n va_x_xmm4\n (va_upd_xmm 3\n va_x_xmm3\n (va_upd_xmm 2\n va_x_xmm2\n (va_upd_xmm 1\n va_x_xmm1\n (va_upd_xmm 0\n va_x_xmm0\n (va_upd_reg64 rR11\n va_x_r11\n (va_upd_reg64 rRcx\n va_x_rcx\n (va_upd_mem va_x_mem va_s0))))))))))))\n ))\n in\n va_get_ok va_sM /\\\n (let len:(va_int_range 1 1) = 1 in\n Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0)\n (va_get_mem_heaplet 5 va_sM) /\\\n Vale.AES.GCTR.gctr_partial alg\n len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b)\n key\n (va_get_xmm 11 va_s0) /\\\n (let raw_quads =\n FStar.Seq.Base.append #quad32\n completed_quads\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n in\n let input_bytes =\n FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)\n 0\n total_bytes\n in\n let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in\n let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in\n l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) ==\n Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==>\n va_k va_sM (())))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: amount: Vale.Def.Types_s.nat8 -> src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval 
ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq 
(amt:int) (dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nPartial implementation:\nlet eval_Palignr (amount: nat8) (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_ssse3 (eval_Palignr_raw amount src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: permutation: Prims.int -> src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : 
instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) 
(dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nPartial implementation:\nlet eval_Shufpd_raw (permutation: int) (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "if 0 <= permutation && permutation < 4\nthen\n Some\n (Mkfour (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\nelse None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: alg: Vale.AES.AES_common_s.algorithm -> offset: Prims.int -> Vale.X64.Decls.va_code\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file 
/home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq 
quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 
rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b 
inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg 
inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n (va_QProc (va_code_Gcm_extra_bytes alg) ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE) (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE))\n//--\n//-- Gcm_blocks128\n\nval va_code_Gcm_blocks128 : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks128 alg =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_CCons\n (va_code_Ghash_buffer ()) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRbx)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR12)) (va_CCons (va_code_Gctr_blocks128 alg) (va_CNil ())))))))))\n\nval va_codegen_success_Gcm_blocks128 : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks128 alg =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Ghash_buffer ()) 
(va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_pbool_and\n (va_codegen_success_Gctr_blocks128 alg) (va_ttrue ()))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks128 (va_mods:va_mods_t) (alg:algorithm) (in_b:buffer128) (out_b:buffer128)\n (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 274 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 275 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 276 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 277 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_buffer hkeys_b in_b h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_old_s))) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 278 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 279 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 280 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_QEmpty (()))))))))))\n\n\nval va_lemma_Gcm_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->\n out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks128 alg) va_s0 /\\ va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < 
pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) /\\ va_state_eq va_sM (va_update_flags va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 10 va_sM (va_update_xmm 11 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR11 va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks128 va_b0 va_s0 alg in_b out_b key round_keys keys_b hkeys_b h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks128 va_mods alg in_b out_b key round_keys keys_b hkeys_b h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks128 alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 210 
column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (label va_range1\n \"***** POSTCONDITION NOT MET AT line 255 column 53 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 261 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 262 column 45 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx\n va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 265 column 93 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) out_b)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 267 column 131 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 > 0 ==> l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n FStar.Seq.Base.length #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) (va_s0:va_state)\n (va_k:(va_state -> unit -> Type0)) : Type0 =\n (va_get_ok va_s0 /\\ (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax 
va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret) /\\ (forall (va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_rdi:nat64) (va_x_r11:nat64)\n (va_x_r10:nat64) (va_x_rdx:nat64) (va_x_r12:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32)\n (va_x_heap1:vale_heap) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl\n (va_upd_mem_heaplet 1 va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11\n (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5\n (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1\n (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64\n rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbx\n va_x_rbx (va_upd_mem va_x_mem va_s0))))))))))))))))))) in va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq\n nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32 ->\n va_s0:va_state -> va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires 
(va_t_require va_s0 /\\ va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b\n h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks128 alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi;\n va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_blocks128 (va_code_Gcm_blocks128 alg) va_s0 alg in_b out_b key\n round_keys keys_b hkeys_b h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 11 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6\n va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2\n va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR12 va_sM\n (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM\n (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM (va_update_ok va_sM (va_update_mem\n va_sM va_s0))))))))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))\n (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) : (va_quickCode\n unit (va_code_Gcm_blocks128 alg)) =\n (va_QProc (va_code_Gcm_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;\n va_Mod_xmm 11; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm\n 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64\n rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_mem])\n (va_wp_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE)\n (va_wpProof_Gcm_blocks128 alg in_b out_b key round_keys keys_b hkeys_b h_LE))\n//--\n//-- Gcm_blocks\n#push-options \"--z3rlimit 1000\"\n\nval va_code_Gcm_blocks : alg:algorithm -> offset:int -> Tot va_code\n\nPartial implementation:\nlet va_code_Gcm_blocks alg offset =\n \n"
},
{
"role": "assistant",
"content": "(va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR13) (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_AddLea64 (va_op_dst_opr64_reg64 rR9)\n (va_op_opr64_reg64 rR9)\n (va_const_opr64 32))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRbx)\n (va_op_reg_opr64_reg64 rRsp)\n (offset + 0))\n (va_CCons (va_code_Gcm_blocks_auth ())\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdi)\n (va_op_reg_opr64_reg64 rRsp)\n (offset + 8))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRsi)\n (va_op_reg_opr64_reg64 rRsp)\n (offset + 16))\n (va_CCons (va_code_Load64_stack (va_op_dst_opr64_reg64 rRdx)\n (va_op_reg_opr64_reg64 rRsp)\n (offset + 24))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRcx)\n (va_op_opr64_reg64 rR13))\n (va_CCons (va_code_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 9))\n (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet\n 2)\n (va_op_xmm_xmm 1)\n (va_op_reg_opr64_reg64 rR8)\n 0\n Public)\n (va_CCons (va_code_Store128_buffer (va_op_heaplet_mem_heaplet\n 3)\n (va_op_reg_opr64_reg64 rRbp)\n (va_op_xmm_xmm 1)\n 0\n Secret)\n (va_CCons (va_code_Load_one_lsb (va_op_xmm_xmm 10))\n (va_CCons (va_code_VPaddd (va_op_xmm_xmm 1)\n (va_op_xmm_xmm 1)\n (va_op_xmm_xmm 10))\n (va_CCons (va_code_AES_GCM_decrypt_6mult alg\n )\n (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet\n 3)\n (va_op_xmm_xmm 11)\n (va_op_reg_opr64_reg64 rRbp)\n 32\n Secret)\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64\n rR8)\n (va_op_opr64_reg64 rRcx))\n (va_CCons (va_code_Load64_stack (\n va_op_dst_opr64_reg64 rRax\n )\n (va_op_reg_opr64_reg64 rRsp\n )\n (offset + 32))\n (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64\n rRdi)\n (va_op_reg_opr64_reg64\n rRsp)\n (offset + 40))\n (va_CCons (va_code_Load64_stack\n (va_op_dst_opr64_reg64\n rRdx)\n (va_op_reg_opr64_reg64\n rRsp)\n (offset + 48))\n (va_CCons (va_code_Mov64\n (va_op_dst_opr64_reg64\n rR14)\n (va_op_opr64_reg64\n rRdx))\n (va_CCons (va_code_InitPshufbMask\n (va_op_xmm_xmm\n 9)\n (va_op_reg_opr64_reg64\n rR12\n ))\n (va_CCons (va_code_Pshufb\n (va_op_xmm_xmm\n 11\n )\n (va_op_xmm_xmm\n 9\n ))\n (va_CCons\n (va_code_Gcm_blocks128\n alg\n )\n (va_CCons\n (\n va_code_Stack_lemma\n ()\n\n )\n (\n va_CCons\n (\n va_code_Add64\n (\n va_op_dst_opr64_reg64\n rR14\n\n )\n (\n va_opr_code_Stack\n (\n va_op_reg64_reg64\n rRsp\n\n )\n (\n offset +\n 24\n )\n Public\n\n )\n\n )\n (\n va_CCons\n (\n va_code_IMul64\n (\n va_op_dst_opr64_reg64\n rR14\n\n )\n (\n va_const_opr64\n 16\n\n )\n\n )\n (\n va_CCons\n (\n va_code_Load64_stack\n (\n va_op_dst_opr64_reg64\n rR13\n\n )\n (\n va_op_reg_opr64_reg64\n rRsp\n\n )\n (\n offset +\n 64\n )\n\n )\n (\n va_CCons\n (\n va_IfElse\n (\n va_cmp_gt\n (\n va_op_cmp_reg64\n rR13\n\n )\n (\n va_op_cmp_reg64\n rR14\n\n )\n\n )\n (\n va_Block\n (\n va_CCons\n (\n va_code_Load64_stack\n (\n va_op_dst_opr64_reg64\n rRax\n\n )\n (\n va_op_reg_opr64_reg64\n rRsp\n\n )\n (\n offset +\n 56\n )\n\n )\n (\n va_CCons\n (\n va_code_Mov64\n (\n va_op_dst_opr64_reg64\n rR10\n\n )\n (\n va_op_opr64_reg64\n rR13\n\n )\n\n )\n (\n va_CCons\n (\n va_code_And64\n (\n va_op_dst_opr64_reg64\n rR10\n\n )\n (\n va_const_opr64\n 15\n\n )\n\n )\n (\n va_CCons\n (\n va_code_Gcm_extra_bytes\n alg\n\n )\n (\n va_CCons\n (\n va_Block\n (\n va_CNil\n ()\n\n )\n\n )\n (\n va_CNil\n ()\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n (\n va_Block\n (\n va_CNil\n ()\n\n )\n\n )\n\n )\n (\n va_CCons\n (\n va_code_Mov64\n (\n va_op_dst_opr64_reg64\n rR11\n\n )\n (\n va_op_opr64_reg64\n rR15\n\n 
)\n\n )\n (\n va_CCons\n (\n va_code_Gcm_make_length_quad\n ()\n\n )\n (\n va_CCons\n (\n va_code_Ghash_register\n ()\n\n )\n (\n va_CCons\n (\n va_code_Load128_buffer\n (\n va_op_heaplet_mem_heaplet\n 3\n\n )\n (\n va_op_xmm_xmm\n 0\n\n )\n (\n va_op_reg_opr64_reg64\n rRbp\n\n )\n 0\n Secret\n\n )\n (\n va_CCons\n (\n va_code_Gctr_register\n alg\n\n )\n (\n va_CCons\n (\n va_Block\n (\n va_CNil\n ()\n\n )\n\n )\n (\n va_CNil\n ()\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n\n )\n ))\n ))))))))))))\n )))))))))))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: permutation: Prims.int -> src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : 
instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) 
(dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nPartial implementation:\nlet eval_Shufpd (permutation: int) (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (eval_Shufpd_raw permutation src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: amount: Vale.Def.Types_s.nat8 -> src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval 
ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq 
(amt:int) (dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nPartial implementation:\nlet eval_VPalignr (amount: nat8) (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_avx (eval_Palignr_raw amount src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: permutation: Prims.int -> src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : 
instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) 
(dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nPartial implementation:\nlet eval_VShufpd (permutation: int) (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_avx (eval_Shufpd_raw permutation src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: old_carry: Prims.bool -> dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Prims.bool * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nPartial implementation:\nlet eval_AddCarry64 (old_carry: bool) (dst src: nat64) : option (bool & nat64) =\n \n"
},
{
"role": "assistant",
"content": "let sum = dst + src + (if old_carry then 1 else 0) in\nSome (sum >= pow2_64, sum % pow2_64)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n alg: Vale.AES.AES_common_s.algorithm ->\n inout_b: Vale.X64.Memory.buffer128 ->\n key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->\n round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n keys_b: Vale.X64.Memory.buffer128 ->\n hkeys_b: Vale.X64.Memory.buffer128 ->\n total_bytes: Prims.nat ->\n old_hash: Vale.X64.Decls.quad32 ->\n completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n h_LE: Vale.X64.Decls.quad32\n -> Vale.X64.QuickCode.va_quickCode Prims.unit\n (Vale.AES.X64.GCMdecryptOpt.va_code_Gcm_extra_bytes alg)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) 
(inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file 
/home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM 
(va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) 
va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . 
let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\nPartial implementation:\nlet va_quick_Gcm_extra_bytes\n (alg: algorithm)\n (inout_b: buffer128)\n (key: (seq nat32))\n (round_keys: (seq quad32))\n (keys_b hkeys_b: buffer128)\n (total_bytes: 
nat)\n (old_hash: quad32)\n (completed_quads: (seq quad32))\n (h_LE: quad32)\n : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n \n"
},
{
"role": "assistant",
"content": "(va_QProc (va_code_Gcm_extra_bytes alg)\n ([\n va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;\n va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;\n va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem\n ])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE)\n (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n k: Pulse.Checker.Base.continuation_elaborator g1 ctxt g2 ctxt1 ->\n d: Pulse.Typing.vprop_equiv g2 ctxt1 ctxt2\n -> Pulse.Checker.Base.continuation_elaborator g1 ctxt g2 ctxt2\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | 
EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n 
(#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) == comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = 
vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) =\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\n\nlet st_equiv_pre (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (pre:term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre)\n = if eq_tm pre (comp_pre c) then d\n else\n let c' = comp_with_pre c pre in\n let (| u_of, pre_typing, x, post_typing |) =\n Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv\n\nPartial implementation:\nlet k_elab_equiv_continuation\n (#g1: env)\n (#g2: env{g2 `env_extends` g1})\n (#ctxt #ctxt1 #ctxt2: term)\n (k: continuation_elaborator g1 ctxt g2 ctxt1)\n (d: vprop_equiv g2 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt g2 ctxt2 =\n \n"
},
{
"role": "assistant",
"content": "fun post_hint res ->\n let (| st , c , st_d |) = res in\n let st_d:st_typing g2 st c = st_d in\n assert (comp_pre c == ctxt2);\n let st_d':st_typing g2 st (comp_with_pre c ctxt1) = st_equiv_pre st_d _ (VE_Sym _ _ _ d) in\n k post_hint (| st, _, st_d' |)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: q: Vale.X64.Machine_s.quad32 -> Prims.bool\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & 
bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst 
in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nPartial implementation:\nlet is_full_byte_reversal_mask (q: quad32) : bool =\n \n"
},
{
"role": "assistant",
"content": "q.lo0 = 0x0C0D0E0F && q.lo1 = 0x08090A0B && q.hi2 = 0x04050607 && q.hi3 = 0x00010203"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.nat64 -> src2: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nPartial implementation:\nlet eval_AddLea64 (src1 src2: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "Some ((src1 + src2) % pow2_64)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: old_flag: Prims.bool -> dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Prims.bool * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nPartial implementation:\nlet eval_Adcx64_Adox64 (old_flag: bool) (dst src: nat64) : option (bool & nat64) =\n \n"
},
{
"role": "assistant",
"content": "let sum = dst + src + (if old_flag then 1 else 0) in\nif adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: rdx: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Vale.X64.Machine_s.nat64 * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nPartial implementation:\nlet eval_Mulx64 (rdx src: nat64) : option (nat64 & nat64) =\n \n"
},
{
"role": "assistant",
"content": "let hi = FStar.UInt.mul_div #64 rdx src in\nlet lo = FStar.UInt.mul_mod #64 rdx src in\nif bmi2_enabled then Some (hi, lo) else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Prims.bool * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nPartial implementation:\nlet eval_Sub64 (dst src: nat64) : option (bool & nat64) =\n \n"
},
{
"role": "assistant",
"content": "let diff = dst - src in\nSome (diff < 0, diff % pow2_64)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n d: Pulse.Typing.st_typing g e (Pulse.Syntax.Base.C_Tot t0) ->\n t1: Pulse.Syntax.Base.term ->\n d1: Pulse.Typing.tot_typing g t1 (Pulse.Syntax.Pure.tm_type u575) ->\n steps: Prims.list FStar.Pervasives.norm_step\n -> FStar.Tactics.Effect.Tac\n (FStar.Pervasives.Native.option (Pulse.Typing.st_typing g e (Pulse.Syntax.Base.C_Tot t1)))\nEffect: FStar.Tactics.Effect.Tac\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty 
tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n 
post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) 
== comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) =\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\n\nlet st_equiv_pre (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (pre:term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre)\n = if eq_tm pre (comp_pre c) then d\n else\n let c' = comp_with_pre c pre in\n let (| u_of, pre_typing, x, post_typing |) =\n Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv\n\n\n#push-options \"--z3rlimit_factor 4 --ifuel 2 --fuel 0\"\nlet k_elab_equiv_continuation (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt #ctxt1 #ctxt2:term)\n (k:continuation_elaborator g1 ctxt g2 ctxt1)\n (d:vprop_equiv g2 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt g2 ctxt2 =\n fun post_hint res ->\n let (| st, c, st_d |) = res in\n let st_d : st_typing g2 st c = st_d in\n assert (comp_pre c == ctxt2);\n let st_d' : st_typing g2 st (comp_with_pre c ctxt1) = st_equiv_pre st_d _ (VE_Sym _ _ _ d) in\n k post_hint (| st, _, st_d' |)\n#pop-options\n\nlet vprop_equiv_typing_fwd (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g ctxt p)\n : tot_typing g p tm_vprop \n = let fwd, _ = vprop_equiv_typing d in\n fwd ctxt_typing\n\n#push-options \"--z3rlimit_factor 4 --ifuel 1 --fuel 0\"\nlet k_elab_equiv_prefix\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt2 #ctxt:term)\n (k:continuation_elaborator g1 ctxt1 g2 ctxt)\n (d:vprop_equiv g1 ctxt1 ctxt2)\n : continuation_elaborator g1 ctxt2 g2 ctxt =\n fun post_hint res ->\n let framing_token : frame_for_req_in_ctxt g1 ctxt2 ctxt1 = \n let d = VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Trans _ _ _ _ (VE_Unit _ _) d) in\n (| tm_emp, emp_typing, d |)\n in\n let res = k post_hint res in\n let (| st, c, st_d |) = res in\n assert (comp_pre c == ctxt1);\n (| _, _, st_equiv_pre st_d _ d |)\n #pop-options\n\nlet k_elab_equiv\n (#g1:env) (#g2:env { g2 `env_extends` g1 }) (#ctxt1 #ctxt1' #ctxt2 #ctxt2':term) \n (k:continuation_elaborator g1 ctxt1 g2 ctxt2)\n (d1:vprop_equiv g1 ctxt1 ctxt1')\n (d2:vprop_equiv g2 ctxt2 ctxt2')\n : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n \n let k : continuation_elaborator g1 ctxt1 g2 ctxt2' =\n k_elab_equiv_continuation k d2 in\n let k : continuation_elaborator g1 ctxt1' g2 ctxt2' =\n k_elab_equiv_prefix k d1 in\n k\n\n#push-options \"--query_stats --fuel 2 --ifuel 2 --split_queries no --z3rlimit_factor 20\"\nopen Pulse.PP\nlet continuation_elaborator_with_bind (#g:env) (ctxt:term)\n (#c1:comp{stateful_comp c1})\n (#e1:st_term)\n (e1_typing:st_typing g e1 c1)\n (ctxt_pre1_typing:tot_typing g (tm_star ctxt (comp_pre c1)) tm_vprop)\n (x:nvar { None? 
(lookup g (snd x)) })\n : T.Tac (continuation_elaborator\n g\n (tm_star ctxt (comp_pre c1))\n (push_binding g (snd x) (fst x) (comp_res c1))\n (tm_star (open_term (comp_post c1) (snd x)) ctxt)) =\n\n let pre1 = comp_pre c1 in\n let res1 = comp_res c1 in\n let post1 = comp_post c1 in\n let ctxt_typing = star_typing_inversion_l ctxt_pre1_typing in\n // let p_prop = Metatheory.pure_typing_inversion pure_typing in\n let v_eq = VE_Comm g ctxt pre1 in\n let framing_token : frame_for_req_in_ctxt g (tm_star ctxt pre1) pre1 = \n (| ctxt, ctxt_typing, VE_Comm g pre1 ctxt |)\n in\n let (| c1, e1_typing |) =\n apply_frame ctxt_pre1_typing e1_typing framing_token in\n let (| u_of_1, pre_typing, _, _ |) =\n Metatheory.(st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness e1_typing))) in\n let b = res1 in\n let ppname, x = x in\n let g' = push_binding g x ppname b in\n \n let post1_opened = open_term_nv post1 (v_as_nv x) in\n let k : continuation_elaborator g (tm_star ctxt pre1) g' (tm_star post1_opened ctxt) =\n fun post_hint res ->\n let (| e2, c2, e2_typing |) = res in\n assert (comp_post_matches_hint c2 post_hint);\n let e2_typing : st_typing g' e2 c2 = e2_typing in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term e2_closed x == e2);\n assert (comp_pre c1 == (tm_star ctxt pre1));\n assert (comp_post c1 == tm_star post1 ctxt);\n assert (comp_pre c2 == tm_star post1_opened ctxt);\n assert (open_term (comp_post c1) x == tm_star post1_opened (open_term ctxt x));\n // ctxt is well-typed, hence ln\n assume (open_term ctxt x == ctxt);\n assert (open_term (comp_post c1) x == comp_pre c2);\n // we closed e2 with x\n assume (~ (x `Set.mem` freevars_st e2_closed));\n if x `Set.mem` freevars (comp_post c2)\n then fail g' None \"Impossible: freevar clash when constructing continuation elaborator for bind, please file a bug-report\"\n else (\n let t_typing, post_typing =\n Pulse.Typing.Combinators.bind_res_and_post_typing g c2 x post_hint in\n let g = push_context g \"mk_bind\" e1.range in\n // info_doc g None\n // [prefix 4 1 (doc_of_string \"mk_bind e1 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e1));\n // prefix 4 1 (doc_of_string \"mk_bind c1 = \") (pp #comp c1);\n // prefix 4 1 (doc_of_string \"mk_bind e2 = \") (doc_of_string (Pulse.Syntax.Printer.st_term_to_string e2));\n // prefix 4 1 (doc_of_string \"mk_bind c2 = \") (pp #comp c2)]\n // ;\n let (| e, c, e_typing |) =\n Pulse.Typing.Combinators.mk_bind\n g (tm_star ctxt pre1) \n e1 e2_closed c1 c2 (ppname, x) e1_typing\n u_of_1 \n e2_typing\n t_typing\n post_typing\n (Some? post_hint)\n in\n (| e, c, e_typing |)\n )\n in\n k\n#pop-options\n\nmodule LN = Pulse.Typing.LN\n#push-options \"--z3rlimit_factor 4 --fuel 1 --ifuel 1\"\n\n\nlet st_comp_typing_with_post_hint \n (#g:env) (#ctxt:_)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (post_hint:post_hint_opt g { Some? post_hint })\n (c:comp_st { comp_pre c == ctxt /\\ comp_post_matches_hint c post_hint })\n: st_comp_typing g (st_comp_of_comp c)\n= let st = st_comp_of_comp c in\n let Some ph = post_hint in\n let post_typing_src\n : tot_typing (push_binding ph.g ph.x ppname_default ph.ret_ty)\n (open_term ph.post ph.x) tm_vprop\n = ph.post_typing_src\n in\n let x = fresh g in\n assume (fresh_wrt x g (freevars ph.post));\n assume (None? 
(lookup g ph.x));\n let post_typing_src\n : tot_typing (push_binding ph.g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = if x = ph.x\n then post_typing_src\n else \n let open Pulse.Typing.Metatheory.Base in\n let tt :\n tot_typing\n (push_binding ph.g x ppname_default ph.ret_ty)\n (subst_term (open_term ph.post ph.x) (renaming ph.x x))\n (subst_term tm_vprop (renaming ph.x x)) =\n tot_typing_renaming1 ph.g ph.x ph.ret_ty (open_term ph.post ph.x) tm_vprop post_typing_src x\n in\n assert (subst_term tm_vprop (renaming ph.x x) == tm_vprop);\n assume (subst_term (open_term ph.post ph.x) (renaming ph.x x) ==\n open_term ph.post x);\n tt\n in\n let post_typing_src\n : tot_typing (push_binding g x ppname_default ph.ret_ty)\n (open_term ph.post x) tm_vprop\n = //weakening: TODO\n RU.magic ()\n in\n let ty_typing : universe_of ph.g st.res st.u = ph.ty_typing in\n let ty_typing : universe_of g st.res st.u =\n Pulse.Typing.Metatheory.tot_typing_weakening_standard ph.g ty_typing g\n in\n assert (st.res == ph.ret_ty);\n assert (st.post == ph.post);\n STC g st x ty_typing ctxt_typing post_typing_src\n\nlet continuation_elaborator_with_bind_fn (#g:env) (#ctxt:term)\n (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#e1:st_term)\n (#c1:comp { C_Tot? c1 })\n (b:binder{b.binder_ty == comp_res c1})\n (e1_typing:st_typing g e1 c1)\n (x:nvar { None? (lookup g (snd x)) })\n: T.Tac (continuation_elaborator\n g ctxt\n (push_binding g (snd x) ppname_default (comp_res c1)) ctxt)\n= let t1 = comp_res c1 in\n assert ((push_binding g (snd x) (fst x) t1) `env_extends` g);\n fun post_hint (| e2, c2, d2 |) ->\n if None? post_hint then T.fail \"bind_fn: expects the post_hint to be set\";\n let ppname, x = x in\n let e2_closed = close_st_term e2 x in\n assume (open_st_term (close_st_term e2 x) x == e2);\n let e = wr c2 (Tm_Bind {binder=b; head=e1; body=e2_closed}) in\n let (| u, c1_typing |) = Pulse.Typing.Metatheory.Base.st_typing_correctness_ctot e1_typing in\n let c2_typing : comp_typing g c2 (universe_of_comp c2) =\n match c2 with\n | C_ST st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_ST _ _ stc\n \n | C_STAtomic i obs st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n let i_typing = CP.core_check_term g i T.E_Total tm_inames in\n CT_STAtomic _ _ obs _ i_typing stc\n\n | C_STGhost st -> \n let stc = st_comp_typing_with_post_hint ctxt_typing post_hint c2 in\n CT_STGhost _ _ stc\n in\n let d : st_typing g e c2 =\n T_BindFn g e1 e2_closed c1 c2 b x e1_typing u c1_typing d2 c2_typing\n in\n (| e, c2, d |)\n\nlet rec check_equiv_emp (g:env) (vp:term)\n : option (vprop_equiv g vp tm_emp)\n = match vp.t with\n | Tm_Emp -> Some (VE_Refl _ _)\n | Tm_Star vp1 vp2 ->\n (match check_equiv_emp g vp1, check_equiv_emp g vp2 with\n | Some d1, Some d2 ->\n let d3 : vprop_equiv g (tm_star vp1 vp2) (tm_star tm_emp tm_emp)\n = VE_Ctxt _ _ _ _ _ d1 d2 in\n let d4 : vprop_equiv g (tm_star tm_emp tm_emp) tm_emp =\n VE_Unit _ _ in\n Some (VE_Trans _ _ _ _ d3 d4)\n | _, _ -> None)\n | _ -> None\n\n\n\nlet emp_inames_included (g:env) (i:term) (_:tot_typing g i tm_inames)\n: prop_validity g (tm_inames_subset tm_emp_inames i)\n= RU.magic()\n\nlet return_in_ctxt (g:env) (y:var) (y_ppname:ppname) (u:universe) (ty:term) (ctxt:vprop)\n (ty_typing:universe_of g ty u)\n (post_hint0:post_hint_opt g { Some? 
post_hint0 /\\ checker_res_matches_post_hint g post_hint0 y ty ctxt})\n: Pure (st_typing_in_ctxt g ctxt post_hint0)\n (requires lookup g y == Some ty)\n (ensures fun _ -> True)\n= let Some post_hint = post_hint0 in\n let x = fresh g in\n assume (~ (x `Set.mem` freevars post_hint.post));\n let ctag =\n match post_hint.effect_annot with\n | EffectAnnotAtomic _ -> STT_Atomic\n | EffectAnnotGhost -> STT_Ghost\n | _ -> STT\n in\n let y_tm = tm_var {nm_index=y;nm_ppname=y_ppname} in\n let d = T_Return g ctag false u ty y_tm post_hint.post x ty_typing\n (RU.magic ()) // that null_var y is well typed at ty in g, we know since lookup g y == Some ty\n (RU.magic ()) // typing of (open post x) in (g, x) ... post_hint is well-typed, so should get\n in\n let t = wtag (Some ctag) (Tm_Return {expected_type=tm_unknown;insert_eq=false;term=y_tm}) in\n let c = comp_return ctag false u ty y_tm post_hint.post x in\n let d : st_typing g t c = d in\n assume (comp_u c == post_hint.u); // this u should follow from equality of t\n match c, post_hint.effect_annot with\n | C_STAtomic _ obs _, EffectAnnotAtomic { opens } ->\n assert (comp_inames c == tm_emp_inames);\n let pht = post_hint_typing g post_hint x in\n let validity = emp_inames_included g opens pht.effect_annot_typing in\n let d = T_Sub _ _ _ _ d (STS_AtomicInvs _ (st_comp_of_comp c) tm_emp_inames opens obs obs validity) in\n (| _, _, d |)\n | _ -> \n (| _, _, d |)\n\nlet match_comp_res_with_post_hint (#g:env) (#t:st_term) (#c:comp_st)\n (d:st_typing g t c)\n (post_hint:post_hint_opt g)\n : T.Tac (t':st_term &\n c':comp_st &\n st_typing g t' c') =\n\n match post_hint with\n | None -> (| t, c, d |)\n | Some { ret_ty } ->\n let cres = comp_res c in\n if eq_tm cres ret_ty\n then (| t, c, d |)\n else match Pulse.Checker.Pure.check_equiv g cres ret_ty with\n | None ->\n fail g (Some t.range)\n (Printf.sprintf \"Could not prove equiv for computed type %s and expected type %s\"\n (P.term_to_string cres)\n (P.term_to_string ret_ty))\n | Some tok ->\n let d_equiv\n : RT.equiv _ (elab_term cres) (elab_term ret_ty) =\n RT.Rel_eq_token _ _ _ (FStar.Squash.return_squash tok) in\n \n let c' = with_st_comp c {(st_comp_of_comp c) with res = ret_ty } in\n let (| cres_typing, cpre_typing, x, cpost_typing |) =\n st_comp_typing_inversion (fst <| comp_typing_inversion (st_typing_correctness d)) in\n\n let d_stequiv : st_equiv g c c' =\n ST_VPropEquiv _ c c' _ cpre_typing cres_typing cpost_typing d_equiv (VE_Refl _ _) (VE_Refl _ _)\n in\n\n (| t, c', T_Equiv _ _ _ _ d d_stequiv |)\n\nlet apply_checker_result_k (#g:env) (#ctxt:vprop) (#post_hint:post_hint_for_env g)\n (r:checker_result_t g ctxt (Some post_hint))\n (res_ppname:ppname)\n : T.Tac (st_typing_in_ctxt g ctxt (Some post_hint)) =\n\n // TODO: FIXME add to checker result type?\n let (| y, g1, (| u_ty, ty_y, d_ty_y |), (| pre', _ |), k |) = r in\n\n let (| u_ty_y, d_ty_y |) = Pulse.Checker.Pure.check_universe g1 ty_y in\n\n let d : st_typing_in_ctxt g1 pre' (Some post_hint) =\n return_in_ctxt g1 y res_ppname u_ty_y ty_y pre' d_ty_y (Some post_hint) in\n\n k (Some post_hint) d\n\n#push-options \"--z3rlimit_factor 4 --fuel 0 --ifuel 1 --query_stats\"\nlet checker_result_for_st_typing (#g:env) (#ctxt:vprop) (#post_hint:post_hint_opt g)\n (d:st_typing_in_ctxt g ctxt post_hint)\n (ppname:ppname)\n : T.Tac (checker_result_t g ctxt post_hint) =\n\n let (| t, c, d |) = d in\n \n let x = fresh g in\n\n let g' = push_binding g x ppname (comp_res c) in\n let ctxt' = open_term_nv (comp_post c) (ppname, x) in\n let k\n : 
continuation_elaborator\n g (tm_star tm_emp (comp_pre c))\n g' (tm_star ctxt' tm_emp) =\n continuation_elaborator_with_bind tm_emp d (RU.magic ()) (ppname, x) in\n let k\n : continuation_elaborator g (comp_pre c) g' ctxt' =\n k_elab_equiv k (RU.magic ()) (RU.magic ()) in\n\n let _ : squash (checker_res_matches_post_hint g post_hint x (comp_res c) ctxt') =\n match post_hint with\n | None -> ()\n | Some post_hint -> () in\n\n assert (g' `env_extends` g);\n\n let comp_res_typing, _, f =\n Metatheory.(st_comp_typing_inversion_cofinite (fst <| comp_typing_inversion (st_typing_correctness d))) in\n\n // RU.magic is the typing of comp_res in g'\n // weaken comp_res_typing\n\n assume (~ (x `Set.mem` freevars (comp_post c)));\n let tt : universe_of _ _ _ = RU.magic () in\n (| x, g', (| comp_u c, comp_res c, tt |), (| ctxt', f x |), k |)\n#pop-options\n\nmodule R = FStar.Reflection.V2\n\nlet readback_comp_res_as_comp (c:T.comp) : option comp =\n match c with\n | T.C_Total t -> (\n match readback_comp t with\n | None -> None\n | Some c -> Some c\n )\n | _ -> None\n\nlet rec is_stateful_arrow (g:env) (c:option comp) (args:list T.argv) (out:list T.argv)\n : T.Tac (option (list T.argv & T.argv))\n = let open R in\n match c with\n | None -> None\n | Some (C_ST _)\n | Some (C_STGhost _)\n | Some (C_STAtomic _ _ _) -> (\n match args, out with\n | [], hd::tl -> Some (List.rev tl, hd)\n | _ -> None //leftover or not enough args\n )\n\n | Some (C_Tot c_res) -> (\n if not (Tm_FStar? c_res.t)\n then None\n else (\n let Tm_FStar c_res = c_res.t in\n let ht = T.inspect c_res in\n match ht with\n | T.Tv_Arrow b c -> (\n match args with\n | [] -> ( //no more args; check that only implicits remain, ending in an stateful comp \n let bs, c = T.collect_arr_ln_bs c_res in\n if List.Tot.for_all (fun b -> R.Q_Implicit? (R.inspect_binder b).qual) bs\n then is_stateful_arrow g (readback_comp_res_as_comp (R.inspect_comp c)) [] out\n else None //too few args \n )\n\n | (arg, qual)::args' -> ( //check that this arg qual matches the binder and recurse accordingly\n match b.qual, qual with\n | T.Q_Meta _, T.Q_Implicit\n | T.Q_Implicit, T.Q_Implicit \n | T.Q_Explicit, T.Q_Explicit -> //consume this argument\n is_stateful_arrow g (readback_comp_res_as_comp c) args' ((arg, qual)::out)\n\n | T.Q_Meta _, T.Q_Explicit\n | T.Q_Implicit, T.Q_Explicit -> \n //don't consume this argument\n is_stateful_arrow g (readback_comp_res_as_comp c) args out\n\n | _ -> None //incompatible qualifiers; bail\n )\n )\n | _ ->\n let c_res' = RU.whnf_lax (elab_env g) c_res in\n let ht = T.inspect c_res' in\n if T.Tv_Arrow? 
ht\n then (\n assume (not_tv_unknown c_res');\n let c_res' = tm_fstar c_res' (T.range_of_term c_res') in\n is_stateful_arrow g (Some (C_Tot c_res')) args out\n )\n else None\n )\n )\n\nmodule RU = Pulse.RuntimeUtils \nlet is_stateful_application (g:env) (e:term) \n : T.Tac (option st_term)\n = match e.t with\n | Tm_FStar host_term -> (\n let head, args = T.collect_app_ln host_term in\n assume (not_tv_unknown head);\n match RU.lax_check_term_with_unknown_universes (elab_env g) head with\n | None -> None\n | Some ht -> \n assume (not_tv_unknown ht);\n let head_t = tm_fstar ht (T.range_of_term ht) in\n match is_stateful_arrow g (Some (C_Tot head_t)) args [] with \n | None -> None\n | Some (applied_args, (last_arg, aqual))->\n let head = T.mk_app head applied_args in\n assume (not_tv_unknown head);\n let head = tm_fstar head (T.range_of_term head) in\n assume (not_tv_unknown last_arg);\n let last_arg = tm_fstar last_arg (T.range_of_term last_arg) in\n let qual = \n match aqual with\n | T.Q_Implicit -> Some Implicit\n | _ -> None\n in\n let st_app = Tm_STApp { head; arg=last_arg; arg_qual=qual} in\n let st_app = { term = st_app; range=e.range; effect_tag=default_effect_hint } in\n Some st_app\n )\n | _ -> None\n\n\nlet apply_conversion\n (#g:env) (#e:term) (#eff:_) (#t0:term)\n (d:typing g e eff t0)\n (#t1:term)\n (eq:Ghost.erased (RT.related (elab_env g) (elab_term t0) RT.R_Eq (elab_term t1)))\n : typing g e eff t1\n = let d : RT.typing (elab_env g) (elab_term e) (eff, (elab_term t0)) = d._0 in\n let r : RT.related (elab_env g) (elab_term t0) RT.R_Eq (elab_term t1) = eq in\n let r = RT.Rel_equiv _ _ _ RT.R_Sub r in\n let s : RT.sub_comp (elab_env g) (eff, (elab_term t0)) (eff, elab_term t1) = \n RT.Relc_typ _ _ _ _ _ r\n in\n E (RT.T_Sub _ _ _ _ d s)\n \nlet norm_typing\n (g:env) (e:term) (eff:_) (t0:term)\n (d:typing g e eff t0)\n (steps:list norm_step)\n : T.Tac (t':term & typing g e eff t')\n = let t = elab_term t0 in\n let u_t_typing : Ghost.erased (u:R.universe & RT.typing _ _ _) = \n Pulse.Typing.Metatheory.Base.typing_correctness d._0\n in\n let (| t', t'_typing, related_t_t' |) =\n Pulse.RuntimeUtils.norm_well_typed_term (dsnd u_t_typing) steps\n in\n match Pulse.Readback.readback_ty t' with\n | None -> T.fail \"Could not readback normalized type\"\n | Some t'' ->\n let d : typing g e eff t'' = apply_conversion d related_t_t' in\n (| t'', d |)\n\nmodule TermEq = FStar.Reflection.V2.TermEq\nlet norm_typing_inverse\n (g:env) (e:term) (eff:_) (t0:term)\n (d:typing g e eff t0)\n (t1:term)\n (#u:_)\n (d1:tot_typing g t1 (tm_type u))\n (steps:list norm_step)\n : T.Tac (option (typing g e eff t1))\n = let (| t1', t1'_typing, related_t1_t1' |) =\n let d1 = Ghost.hide d1._0 in\n Pulse.RuntimeUtils.norm_well_typed_term d1 steps\n in\n match Pulse.Readback.readback_ty t1' with\n | Some t1_p ->\n if TermEq.term_eq (elab_term t0) t1'\n then (\n let related_t1'_t1 = Ghost.hide (RT.Rel_sym _ _ _ related_t1_t1') in\n Some (apply_conversion d related_t1'_t1)\n )\n else None\n | _ -> None\n\nPartial implementation:\nlet norm_st_typing_inverse\n (#g: env)\n (#e: st_term)\n (#t0: term)\n (d: st_typing g e (C_Tot t0))\n (#u: _)\n (t1: term)\n (d1: tot_typing g t1 (tm_type u))\n (steps: list norm_step)\n : T.Tac (option (st_typing g e (C_Tot t1))) =\n \n"
},
{
"role": "assistant",
"content": "let d1:Ghost.erased (RT.tot_typing (elab_env g) (elab_term t1) (RT.tm_type u)) = Ghost.hide d1._0 in\nlet (| t1' , t1'_typing , related_t1_t1' |) = Pulse.RuntimeUtils.norm_well_typed_term d1 steps in\nmatch Pulse.Readback.readback_ty t1' with\n| Some t1_p ->\n if TermEq.term_eq (elab_term t0) t1'\n then\n (let t0_typing:Ghost.erased (RT.tot_typing (elab_env g) (elab_term t0) (RT.tm_type u)) =\n rt_equiv_typing #_ #_ #(elab_term t0) related_t1_t1' d1\n in\n let eq:Ghost.erased (RT.equiv (elab_env g) (elab_term t0) (elab_term t1)) =\n Ghost.hide (RT.Rel_sym _ _ _ related_t1_t1')\n in\n let steq:st_equiv g (C_Tot t0) (C_Tot t1) =\n ST_TotEquiv _ _ _ u (E (Ghost.reveal t0_typing)) eq\n in\n Some (T_Equiv _ _ _ _ d steq))\n else None\n| _ -> None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.nat64\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nPartial implementation:\nlet eval_IMul64 (dst src: nat64) : option nat64 =\n \n"
},
{
"role": "assistant",
"content": "Some (FStar.UInt.mul_mod #64 dst src)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = 
check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet eval_Pshufb_raw (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if is_full_byte_reversal_mask src2 then\n Some (reverse_bytes_quad32 src1)\n else if is_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else if is_high_dup_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else if is_lower_upper_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None\n\nPartial implementation:\nlet eval_Pshufb (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_ssse3 (eval_Pshufb_raw src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: q: Vale.X64.Machine_s.quad32 -> Prims.bool\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & 
bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst 
in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nPartial implementation:\nlet is_high_dup_reversal_mask (q: quad32) : bool =\n \n"
},
{
"role": "assistant",
"content": "q.lo0 = 0x0C0D0E0F && q.lo1 = 0x08090A0B && q.hi2 = 0x0C0D0E0F && q.hi3 = 0x08090A0B"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = 
check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nPartial implementation:\nlet eval_Pshufb_raw (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "if is_full_byte_reversal_mask src2\nthen Some (reverse_bytes_quad32 src1)\nelse\n if is_byte_reversal_mask src2\n then\n Some\n (Mkfour (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else\n if is_high_dup_reversal_mask src2\n then\n Some\n (Mkfour (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else\n if is_lower_upper_byte_reversal_mask src2\n then\n Some\n (Mkfour (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: q: Vale.X64.Machine_s.quad32 -> Prims.bool\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & 
bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst 
in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nPartial implementation:\nlet is_lower_upper_byte_reversal_mask (q: quad32) : bool =\n \n"
},
{
"role": "assistant",
"content": "q.lo0 = 0x04050607 && q.lo1 = 0x00010203 && q.hi2 = 0x0C0D0E0F && q.hi3 = 0x08090A0B"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: q: Vale.X64.Machine_s.quad32 -> Prims.bool\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & 
bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst 
in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nPartial implementation:\nlet is_byte_reversal_mask (q: quad32) : bool =\n \n"
},
{
"role": "assistant",
"content": "q.lo0 = 0x00010203 && q.lo1 = 0x04050607 && q.hi2 = 0x08090A0B && q.hi3 = 0x0C0D0E0F"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n alg: Vale.AES.AES_common_s.algorithm ->\n in_b: Vale.X64.Memory.buffer128 ->\n out_b: Vale.X64.Memory.buffer128 ->\n key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->\n round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->\n keys_b: Vale.X64.Memory.buffer128 ->\n hkeys_b: Vale.X64.Memory.buffer128 ->\n h_LE: Vale.X64.Decls.quad32 ->\n va_s0: Vale.X64.Decls.va_state ->\n va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)\n -> Type0\nEffect: Prims.Tot\n\nContext:\nmodule Vale.AES.X64.GCMdecryptOpt\nopen Vale.Def.Prop_s\nopen Vale.Def.Opaque_s\nopen FStar.Seq\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Seq_s\nopen Vale.Def.Types_s\nopen Vale.Arch.Types\nopen Vale.Arch.HeapImpl\nopen Vale.AES.AES_s\nopen Vale.AES.GCTR_s\nopen Vale.AES.GCTR\nopen Vale.AES.GCM\nopen Vale.AES.GHash_s\nopen Vale.AES.GHash\nopen Vale.AES.GCM_s\nopen Vale.AES.X64.AES\nopen Vale.AES.GF128_s\nopen Vale.AES.GF128\nopen Vale.Poly1305.Math\nopen Vale.AES.GCM_helpers\nopen Vale.AES.X64.GHash\nopen Vale.AES.X64.GCTR\nopen Vale.X64.Machine_s\nopen Vale.X64.Memory\nopen Vale.X64.Stack_i\nopen Vale.X64.State\nopen Vale.X64.Decls\nopen Vale.X64.InsBasic\nopen Vale.X64.InsMem\nopen Vale.X64.InsVector\nopen Vale.X64.InsStack\nopen Vale.X64.InsAes\nopen Vale.X64.QuickCode\nopen Vale.X64.QuickCodes\nopen Vale.AES.X64.GF128_Mul\nopen Vale.X64.Stack\nopen Vale.X64.CPU_Features_s\nopen Vale.Math.Poly2.Bits_s\nopen Vale.AES.X64.AESopt\nopen Vale.AES.X64.AESGCM\nopen Vale.AES.X64.AESopt2\nopen Vale.Lib.Meta\nopen Vale.AES.X64.GCMencryptOpt\nopen Vale.AES.OptPublic\nopen Vale.Lib.Basic\n#reset-options \"--z3rlimit 20 --max_ifuel 0\"\n//-- Gcm_extra_bytes\n\nval va_code_Gcm_extra_bytes : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_extra_bytes alg =\n (va_Block (va_CCons (va_code_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_CCons (va_code_Mov128 (va_op_xmm_xmm 10)\n (va_op_xmm_xmm 0)) (va_CCons (va_code_Ghash_extra_bytes ()) (va_CCons (va_code_Mov128\n (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_CCons (va_code_Pshufb (va_op_xmm_xmm 0)\n (va_op_xmm_xmm 9)) (va_CCons (va_code_AESEncryptBlock alg) (va_CCons (va_code_Pxor\n (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_CCons (va_code_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_CNil ()))))))))))\n\nval va_codegen_success_Gcm_extra_bytes : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_extra_bytes alg =\n (va_pbool_and (va_codegen_success_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0)\n (va_op_reg_opr64_reg64 rRax) 0 Secret) (va_pbool_and (va_codegen_success_Mov128 (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Ghash_extra_bytes ()) (va_pbool_and\n (va_codegen_success_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_pbool_and\n (va_codegen_success_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (va_pbool_and\n (va_codegen_success_AESEncryptBlock alg) (va_pbool_and (va_codegen_success_Pxor (va_op_xmm_xmm\n 10) (va_op_xmm_xmm 0)) (va_pbool_and (va_codegen_success_Store128_buffer\n (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax) (va_op_xmm_xmm 10) 0 Secret)\n (va_ttrue ())))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_extra_bytes (va_mods:va_mods_t) (alg:algorithm) (inout_b:buffer128) (key:(seq\n nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) 
(hkeys_b:buffer128) (total_bytes:nat)\n (old_hash:quad32) (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit\n (va_code_Gcm_extra_bytes alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in let (len:(va_int_range\n 1 1)) = 1 in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 188 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Load128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_xmm_xmm 0) (va_op_reg_opr64_reg64\n rRax) 0 Secret inout_b 0) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 189 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (fun (va_s:va_state) _ -> let\n (hash_input:quad32) = va_get_xmm 0 va_s in va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 193 column 22 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads) (fun\n (va_s:va_state) _ -> va_qAssert va_range1\n \"***** PRECONDITION NOT MET AT line 194 column 5 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (FStar.Seq.Base.equal #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s)\n inout_b) (FStar.Seq.Base.create #quad32 1 hash_input)) (let (snap:(FStar.Seq.Base.seq\n Vale.X64.Decls.quad32)) = Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s) inout_b in va_QSeq\n va_range1\n \"***** PRECONDITION NOT MET AT line 198 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov128 (va_op_xmm_xmm 0) (va_op_xmm_xmm 11)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 199 column 11 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pshufb (va_op_xmm_xmm 0) (va_op_xmm_xmm 9)) (fun (va_s:va_state) _ -> va_QBind\n va_range1\n \"***** PRECONDITION NOT MET AT line 200 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_AESEncryptBlock alg (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 11 va_s)) key\n round_keys keys_b) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 201 column 26 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (fun (_:unit) -> Vale.AES.AES_s.aes_encrypt_LE_reveal ()) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 204 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Pxor (va_op_xmm_xmm 10) (va_op_xmm_xmm 0)) (va_QBind va_range1\n \"***** PRECONDITION NOT MET AT line 205 column 20 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Store128_buffer (va_op_heaplet_mem_heaplet 5) (va_op_reg_opr64_reg64 rRax)\n (va_op_xmm_xmm 10) 0 Secret inout_b 0) (fun (va_s:va_state) _ -> va_qPURE va_range1\n \"***** PRECONDITION NOT MET AT line 207 column 24 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf 
*****\"\n (fun (_:unit) -> Vale.AES.GCTR.gctr_partial_reveal ()) (va_QEmpty (()))))))))))))))\n\n\nval va_lemma_Gcm_extra_bytes : va_b0:va_code -> va_s0:va_state -> alg:algorithm ->\n inout_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 -> completed_quads:(seq quad32) ->\n h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_extra_bytes alg) va_s0 /\\ va_get_ok va_s0 /\\ (let\n (len:(va_int_range 1 1)) = 1 in sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b\n inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b inout_b /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax va_s0) inout_b\n len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) /\\ va_state_eq va_sM\n (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM\n (va_update_mem va_sM va_s0))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet 
va_lemma_Gcm_extra_bytes va_b0 va_s0 alg inout_b key round_keys keys_b hkeys_b total_bytes\n old_hash completed_quads h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8;\n va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm\n 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_extra_bytes va_mods alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_extra_bytes alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 121 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (let (len:(va_int_range 1 1)) = 1 in label va_range1\n \"***** POSTCONDITION NOT MET AT line 174 column 55 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 inout_b (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 177 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg len (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0)) /\\ label\n va_range1\n \"***** POSTCONDITION NOT MET AT line 180 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_s0) inout_b) in label va_range1\n \"***** POSTCONDITION NOT MET AT line 181 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 182 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 183 column 9 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in label va_range1\n \"***** POSTCONDITION NOT MET AT line 186 column 59 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE old_hash input_quads)))))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\n[@ va_qattr]\nlet va_wp_Gcm_extra_bytes (alg:algorithm) 
(inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))\n : Type0 =\n (va_get_ok va_s0 /\\ (let (len:(va_int_range 1 1)) = 1 in sse_enabled /\\\n Vale.X64.Decls.buffers_disjoint128 keys_b inout_b /\\ Vale.X64.Decls.buffers_disjoint128 hkeys_b\n inout_b /\\ Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 5 va_s0) (va_get_reg64 rRax\n va_s0) inout_b len (va_get_mem_layout va_s0) Secret /\\ len == Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 inout_b /\\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour\n #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\ aes_reqs alg key round_keys\n keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\\n pclmulqdq_enabled /\\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0\n va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret /\\ va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32\n (Vale.AES.GHash.ghash_incremental0 h_LE old_hash completed_quads) /\\ FStar.Seq.Base.length\n #quad32 completed_quads == total_bytes `op_Division` 16 /\\ total_bytes < 16 `op_Multiply`\n FStar.Seq.Base.length #quad32 completed_quads + 16 /\\ va_get_reg64 rR10 va_s0 == total_bytes\n `op_Modulus` 16 /\\ total_bytes `op_Modulus` 16 =!= 0 /\\ (0 < total_bytes /\\ total_bytes < 16\n `op_Multiply` Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\\ 16 `op_Multiply`\n (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\\ (forall\n (va_x_mem:vale_heap) (va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32)\n (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32)\n (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm10:quad32) (va_x_heap5:vale_heap)\n (va_x_efl:Vale.X64.Flags.t) . 
let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 5\n va_x_heap5 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7\n (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3\n (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR11\n va_x_r11 (va_upd_reg64 rRcx va_x_rcx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok\n va_sM /\\ (let (len:(va_int_range 1 1)) = 1 in Vale.X64.Decls.modifies_buffer128 inout_b\n (va_get_mem_heaplet 5 va_s0) (va_get_mem_heaplet 5 va_sM) /\\ Vale.AES.GCTR.gctr_partial alg len\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0) inout_b) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 5 va_sM) inout_b) key (va_get_xmm 11 va_s0) /\\ (let raw_quads =\n FStar.Seq.Base.append #quad32 completed_quads (Vale.X64.Decls.s128 (va_get_mem_heaplet 5 va_s0)\n inout_b) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8\n (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =\n Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =\n Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in l_and (FStar.Seq.Base.length\n #Vale.Def.Types_s.quad32 input_quads > 0) (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_sM) == Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads))) ==> va_k va_sM (())))\n\n\nval va_wpProof_Gcm_extra_bytes : alg:algorithm -> inout_b:buffer128 -> key:(seq nat32) ->\n round_keys:(seq quad32) -> keys_b:buffer128 -> hkeys_b:buffer128 -> total_bytes:nat ->\n old_hash:quad32 -> completed_quads:(seq quad32) -> h_LE:quad32 -> va_s0:va_state ->\n va_k:(va_state -> unit -> Type0)\n -> Ghost (va_state & va_fuel & unit)\n (requires (va_t_require va_s0 /\\ va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE va_s0 va_k))\n (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_extra_bytes alg) ([va_Mod_flags;\n va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;\n va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11;\n va_Mod_reg64 rRcx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))\n[@\"opaque_to_smt\"]\nlet va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE va_s0 va_k =\n let (va_sM, va_f0) = va_lemma_Gcm_extra_bytes (va_code_Gcm_extra_bytes alg) va_s0 alg inout_b key\n round_keys keys_b hkeys_b total_bytes old_hash completed_quads h_LE in\n va_lemma_upd_update va_sM;\n assert (va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 5 va_sM (va_update_xmm 10\n va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5\n va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1\n va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))));\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10; va_Mod_xmm 8; va_Mod_xmm\n 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1;\n va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem]) va_sM va_s0;\n let va_g = () in\n (va_sM, va_f0, va_g)\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_quick_Gcm_extra_bytes (alg:algorithm) (inout_b:buffer128) (key:(seq nat32)) (round_keys:(seq\n quad32)) (keys_b:buffer128) (hkeys_b:buffer128) 
(total_bytes:nat) (old_hash:quad32)\n (completed_quads:(seq quad32)) (h_LE:quad32) : (va_quickCode unit (va_code_Gcm_extra_bytes alg)) =\n (va_QProc (va_code_Gcm_extra_bytes alg) ([va_Mod_flags; va_Mod_mem_heaplet 5; va_Mod_xmm 10;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx; va_Mod_mem])\n (va_wp_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b total_bytes old_hash\n completed_quads h_LE) (va_wpProof_Gcm_extra_bytes alg inout_b key round_keys keys_b hkeys_b\n total_bytes old_hash completed_quads h_LE))\n//--\n//-- Gcm_blocks128\n\nval va_code_Gcm_blocks128 : alg:algorithm -> Tot va_code\n[@ \"opaque_to_smt\" va_qattr]\nlet va_code_Gcm_blocks128 alg =\n (va_Block (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_CCons\n (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_CCons\n (va_code_Ghash_buffer ()) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdi)\n (va_op_opr64_reg64 rRbx)) (va_CCons (va_code_Mov64 (va_op_dst_opr64_reg64 rRdx)\n (va_op_opr64_reg64 rR12)) (va_CCons (va_code_Gctr_blocks128 alg) (va_CNil ())))))))))\n\nval va_codegen_success_Gcm_blocks128 : alg:algorithm -> Tot va_pbool\n[@ \"opaque_to_smt\" va_qattr]\nlet va_codegen_success_Gcm_blocks128 alg =\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx))\n (va_pbool_and (va_codegen_success_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax))\n (va_pbool_and (va_codegen_success_Ghash_buffer ()) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_pbool_and (va_codegen_success_Mov64\n (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_pbool_and\n (va_codegen_success_Gctr_blocks128 alg) (va_ttrue ()))))))))\n\n[@ \"opaque_to_smt\" va_qattr]\nlet va_qcode_Gcm_blocks128 (va_mods:va_mods_t) (alg:algorithm) (in_b:buffer128) (out_b:buffer128)\n (key:(seq nat32)) (round_keys:(seq quad32)) (keys_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)\n : (va_quickCode unit (va_code_Gcm_blocks128 alg)) =\n (qblock va_mods (fun (va_s:va_state) -> let (va_old_s:va_state) = va_s in va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 274 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRbx) (va_op_opr64_reg64 rRdi)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 275 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rR12) (va_op_opr64_reg64 rRdx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 276 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRax)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 277 column 17 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Ghash_buffer hkeys_b in_b h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8\n va_old_s))) (va_QSeq va_range1\n \"***** 
PRECONDITION NOT MET AT line 278 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdi) (va_op_opr64_reg64 rRbx)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 279 column 10 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Mov64 (va_op_dst_opr64_reg64 rRdx) (va_op_opr64_reg64 rR12)) (va_QSeq va_range1\n \"***** PRECONDITION NOT MET AT line 280 column 19 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_quick_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_QEmpty (()))))))))))\n\n\nval va_lemma_Gcm_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->\n out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 ->\n hkeys_b:buffer128 -> h_LE:quad32\n -> Ghost (va_state & va_fuel)\n (requires (va_require_total va_b0 (va_code_Gcm_blocks128 alg) va_s0 /\\ va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\ (Vale.X64.Decls.buffers_disjoint128 in_b\n out_b \\/ in_b == out_b) /\\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b\n (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\\ va_get_reg64 rRax va_s0 + 16\n `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`\n va_get_reg64 rRdx va_s0 < pow2_64 /\\ l_and (Vale.X64.Decls.buffer_length\n #Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\ va_get_reg64 rRdx\n va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\ va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8\n va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\ Vale.X64.Decls.validSrcAddrs128\n (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0)\n Secret)))\n (ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\\ va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM) /\\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0) /\\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)\n (va_get_reg64 rRdx va_s0) /\\ (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM ==\n va_get_xmm 8 va_s0) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\ (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 
(va_get_mem_heaplet 1 va_s0) in_b) ==> FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b)))) /\\ va_state_eq va_sM (va_update_flags va_sM\n (va_update_mem_heaplet 1 va_sM (va_update_xmm 10 va_sM (va_update_xmm 11 va_sM (va_update_xmm 8\n va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4\n va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0\n va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rR10 va_sM\n (va_update_reg64 rR11 va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbx va_sM\n (va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))\n[@\"opaque_to_smt\"]\nlet va_lemma_Gcm_blocks128 va_b0 va_s0 alg in_b out_b key round_keys keys_b hkeys_b h_LE =\n let (va_mods:va_mods_t) = [va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11;\n va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm\n 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem] in\n let va_qc = va_qcode_Gcm_blocks128 va_mods alg in_b out_b key round_keys keys_b hkeys_b h_LE in\n let (va_sM, va_fM, va_g) = va_wp_sound_code_norm (va_code_Gcm_blocks128 alg) va_qc va_s0 (fun\n va_s0 va_sM va_g -> let () = va_g in label va_range1\n \"***** POSTCONDITION NOT MET AT line 210 column 1 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_ok va_sM) /\\ (label va_range1\n \"***** POSTCONDITION NOT MET AT line 255 column 53 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1\n va_sM)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 261 column 95 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_s0) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key\n (va_get_xmm 11 va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 262 column 45 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx\n va_s0)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 265 column 93 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 == 0 ==> l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) out_b)) /\\ label va_range1\n \"***** POSTCONDITION NOT MET AT line 267 column 131 of file /home/gebner/fstar_dataset/projects/hacl-star/vale/code/crypto/aes/x64/Vale.AES.X64.GCMdecryptOpt.vaf *****\"\n (va_get_reg64 rRdx va_s0 
> 0 ==> l_and (va_get_reg64 rRdx va_s0 <= FStar.Seq.Base.length\n #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n FStar.Seq.Base.length #Vale.X64.Decls.quad32 (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) 0 (va_get_reg64 rRdx va_s0)) > 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental\n h_LE (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0)) (Vale.X64.Decls.s128\n (va_get_mem_heaplet 1 va_s0) in_b))))) in\n assert_norm (va_qc.mods == va_mods);\n va_lemma_norm_mods ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm\n 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;\n va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR12; va_Mod_reg64 rRdx; va_Mod_reg64 rR10;\n va_Mod_reg64 rR11; va_Mod_reg64 rRdi; va_Mod_reg64 rRbx; va_Mod_ok; va_Mod_mem]) va_sM va_s0;\n (va_sM, va_fM)\n\nPartial implementation:\nlet va_wp_Gcm_blocks128\n (alg: algorithm)\n (in_b out_b: buffer128)\n (key: (seq nat32))\n (round_keys: (seq quad32))\n (keys_b hkeys_b: buffer128)\n (h_LE: quad32)\n (va_s0: va_state)\n (va_k: (va_state -> unit -> Type0))\n : Type0 =\n \n"
},
{
"role": "assistant",
"content": "(va_get_ok va_s0 /\\\n (sse_enabled /\\ Vale.X64.Decls.buffers_disjoint128 keys_b out_b /\\\n Vale.X64.Decls.buffers_disjoint128 hkeys_b out_b /\\\n (Vale.X64.Decls.buffers_disjoint128 in_b out_b \\/ in_b == out_b) /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRax va_s0)\n in_b\n (va_get_reg64 rRdx va_s0)\n (va_get_mem_layout va_s0)\n Secret /\\\n Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0)\n (va_get_reg64 rRdi va_s0)\n out_b\n (va_get_reg64 rRdx va_s0)\n (va_get_mem_layout va_s0)\n Secret /\\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` (va_get_reg64 rRdx va_s0) < pow2_64 /\\\n va_get_reg64 rRdi va_s0 + 16 `op_Multiply` (va_get_reg64 rRdx va_s0) < pow2_64 /\\\n l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==\n Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)\n (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\\\n va_get_reg64 rRdx va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\\\n va_get_xmm 9 va_s0 ==\n Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\\\n va_get_reg64 rRdx va_s0 < pow2_32 /\\\n aes_reqs alg\n key\n round_keys\n keys_b\n (va_get_reg64 rR8 va_s0)\n (va_get_mem_heaplet 0 va_s0)\n (va_get_mem_layout va_s0) /\\ pclmulqdq_enabled /\\\n Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)\n (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\\\n Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)\n (va_get_reg64 rR9 va_s0 - 32)\n hkeys_b\n 8\n (va_get_mem_layout va_s0)\n Secret) /\\\n (forall (va_x_mem: vale_heap) (va_x_rbx: nat64) (va_x_rdi: nat64) (va_x_r11: nat64)\n (va_x_r10: nat64) (va_x_rdx: nat64) (va_x_r12: nat64) (va_x_xmm0: quad32) (va_x_xmm1: quad32)\n (va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32)\n (va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm11: quad32)\n (va_x_xmm10: quad32) (va_x_heap1: vale_heap) (va_x_efl: Vale.X64.Flags.t).\n let va_sM =\n va_upd_flags va_x_efl\n (va_upd_mem_heaplet 1\n va_x_heap1\n (va_upd_xmm 10\n va_x_xmm10\n (va_upd_xmm 11\n va_x_xmm11\n (va_upd_xmm 8\n va_x_xmm8\n (va_upd_xmm 7\n va_x_xmm7\n (va_upd_xmm 6\n va_x_xmm6\n (va_upd_xmm 5\n va_x_xmm5\n (va_upd_xmm 4\n va_x_xmm4\n (va_upd_xmm 3\n va_x_xmm3\n (va_upd_xmm 2\n va_x_xmm2\n (va_upd_xmm 1\n va_x_xmm1\n (va_upd_xmm 0\n va_x_xmm0\n (va_upd_reg64 rR12\n va_x_r12\n (va_upd_reg64 rRdx\n va_x_rdx\n (va_upd_reg64 rR10\n va_x_r10\n (va_upd_reg64 rR11\n va_x_r11\n (va_upd_reg64 rRdi\n va_x_rdi\n (va_upd_reg64 rRbx\n va_x_rbx\n (va_upd_mem va_x_mem\n va_s0)))))))))\n ))))))))))\n in\n va_get_ok va_sM /\\\n (Vale.X64.Decls.modifies_buffer128 out_b\n (va_get_mem_heaplet 1 va_s0)\n (va_get_mem_heaplet 1 va_sM) /\\\n Vale.AES.GCTR.gctr_partial alg\n (va_get_reg64 rRdx va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b)\n key\n (va_get_xmm 11 va_s0) /\\\n va_get_xmm 11 va_sM ==\n Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\\\n (va_get_reg64 rRdx va_s0 == 0 ==>\n l_and (va_get_xmm 8 va_sM == va_get_xmm 8 va_s0)\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==\n Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) /\\\n (va_get_reg64 rRdx va_s0 > 0 ==>\n l_and (va_get_reg64 rRdx va_s0 <=\n FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) ==>\n 
FStar.Seq.Base.length #Vale.X64.Decls.quad32\n (FStar.Seq.Base.slice #Vale.X64.Decls.quad32\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b)\n 0\n (va_get_reg64 rRdx va_s0)) >\n 0)\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) ==\n Vale.AES.GHash.ghash_incremental h_LE\n (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))\n (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b)))) ==>\n va_k va_sM (())))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = 
check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet eval_Pshufb_raw (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if is_full_byte_reversal_mask src2 then\n Some (reverse_bytes_quad32 src1)\n else if is_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else if is_high_dup_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else if is_lower_upper_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None\n\nlet eval_Pshufb (src1 src2:quad32) : option quad32 = check_ssse3 (eval_Pshufb_raw src1 src2)\nval ins_Pshufb : instr_dep [inOut opXmm] [opXmm] 
PreserveFlags eval_Pshufb\n\nPartial implementation:\nlet eval_VPshufb (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_avx (eval_Pshufb_raw src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: rax: Vale.X64.Machine_s.nat64 -> src: Vale.X64.Machine_s.nat64\n -> FStar.Pervasives.Native.option (Vale.X64.Machine_s.nat64 * Vale.X64.Machine_s.nat64)\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nPartial implementation:\nlet eval_Mul64 (rax src: nat64) : option (nat64 & nat64) =\n \n"
},
{
"role": "assistant",
"content": "Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: count: Vale.Def.Types_s.nat8 -> src: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] 
HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = 
check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet eval_Pshufb_raw (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if is_full_byte_reversal_mask src2 then\n Some (reverse_bytes_quad32 src1)\n else if is_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else if is_high_dup_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else if is_lower_upper_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None\n\nlet eval_Pshufb (src1 src2:quad32) : option quad32 = check_ssse3 (eval_Pshufb_raw src1 src2)\nval ins_Pshufb : instr_dep [inOut opXmm] [opXmm] 
PreserveFlags eval_Pshufb\n\nlet eval_VPshufb (src1 src2:quad32) : option quad32 = check_avx (eval_Pshufb_raw src1 src2)\nval ins_VPshufb : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPshufb\n\nlet eval_Pshufd (permutation:nat8) (src:quad32) : option quad32 = check_sse2 (\n let bits:bits_of_byte = byte_to_twobits permutation in\n Some (Mkfour\n (select_word src bits.lo0)\n (select_word src bits.lo1)\n (select_word src bits.hi2)\n (select_word src bits.hi3)))\nval ins_Pshufd (permutation:nat8) :\n instr_dep [out opXmm] [opXmm] PreserveFlags (eval_Pshufd permutation)\n\nlet eval_Pcmpeqd (dst src:quad32) : option quad32 = check_sse2 (\n let eq_result (b:bool):nat32 = if b then 0xFFFFFFFF else 0 in\n Some (Mkfour\n (eq_result (src.lo0 = dst.lo0))\n (eq_result (src.lo1 = dst.lo1))\n (eq_result (src.hi2 = dst.hi2))\n (eq_result (src.hi3 = dst.hi3))))\nval ins_Pcmpeqd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pcmpeqd\n\nlet eval_Pextrq (index:nat8) (src:quad32) : option nat64 = check_sse4_1 (\n let src_two = four_to_two_two src in\n Some (two_to_nat 32 (two_select src_two (index % 2))))\nval ins_Pextrq (index:nat8) : instr_dep [out op64] [opXmm] PreserveFlags (eval_Pextrq index)\n\nlet eval_Pinsrd (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat32 dst (src % pow2_32) (index % 4)))\nval ins_Pinsrd (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrd index)\n\nlet eval_Pinsrq (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat64_def dst src (index % 2)))\nval ins_Pinsrq (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrq index)\n\nPartial implementation:\nlet eval_Pslldq_raw (count: nat8) (src: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "if count = 4\nthen Some (Mkfour 0 src.lo0 src.lo1 src.hi2)\nelse if count = 8 then Some (Mkfour 0 0 src.lo0 src.lo1) else None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: imm: Prims.int -> src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : 
instr_dep [inOut op64] [op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) 
(dst:quad32) : option quad32 = check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet eval_Pshufb_raw (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if is_full_byte_reversal_mask src2 then\n Some (reverse_bytes_quad32 src1)\n else if is_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else if is_high_dup_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else if is_lower_upper_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None\n\nlet eval_Pshufb (src1 src2:quad32) : option quad32 = check_ssse3 (eval_Pshufb_raw src1 src2)\nval ins_Pshufb : 
instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pshufb\n\nlet eval_VPshufb (src1 src2:quad32) : option quad32 = check_avx (eval_Pshufb_raw src1 src2)\nval ins_VPshufb : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPshufb\n\nlet eval_Pshufd (permutation:nat8) (src:quad32) : option quad32 = check_sse2 (\n let bits:bits_of_byte = byte_to_twobits permutation in\n Some (Mkfour\n (select_word src bits.lo0)\n (select_word src bits.lo1)\n (select_word src bits.hi2)\n (select_word src bits.hi3)))\nval ins_Pshufd (permutation:nat8) :\n instr_dep [out opXmm] [opXmm] PreserveFlags (eval_Pshufd permutation)\n\nlet eval_Pcmpeqd (dst src:quad32) : option quad32 = check_sse2 (\n let eq_result (b:bool):nat32 = if b then 0xFFFFFFFF else 0 in\n Some (Mkfour\n (eq_result (src.lo0 = dst.lo0))\n (eq_result (src.lo1 = dst.lo1))\n (eq_result (src.hi2 = dst.hi2))\n (eq_result (src.hi3 = dst.hi3))))\nval ins_Pcmpeqd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pcmpeqd\n\nlet eval_Pextrq (index:nat8) (src:quad32) : option nat64 = check_sse4_1 (\n let src_two = four_to_two_two src in\n Some (two_to_nat 32 (two_select src_two (index % 2))))\nval ins_Pextrq (index:nat8) : instr_dep [out op64] [opXmm] PreserveFlags (eval_Pextrq index)\n\nlet eval_Pinsrd (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat32 dst (src % pow2_32) (index % 4)))\nval ins_Pinsrd (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrd index)\n\nlet eval_Pinsrq (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat64_def dst src (index % 2)))\nval ins_Pinsrq (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrq index)\n\nlet eval_Pslldq_raw (count:nat8) (src:quad32) : option quad32 =\n // We only spec the two very special cases we need\n if count = 4 then Some (Mkfour 0 src.lo0 src.lo1 src.hi2)\n else if count = 8 then Some (Mkfour 0 0 src.lo0 src.lo1)\n else None\nlet eval_VPslldq (count:nat8) (src:quad32) : option quad32 = check_avx (eval_Pslldq_raw count src)\nval ins_VPslldq (count:nat8) : instr_dep [out opXmm] [opXmm] PreserveFlags (eval_VPslldq count)\n\nlet eval_Psrldq_8_raw (count:nat8) (src:quad32) : option quad32 =\n // We only spec the one very special case we need\n if count = 8 then Some (Mkfour src.hi2 src.hi3 0 0)\n else None\nlet eval_VPsrldq (count:nat8) (src:quad32) : option quad32 = check_avx (eval_Psrldq_8_raw count src)\nval ins_VPsrldq (count:nat8) : instr_dep [out opXmm] [opXmm] PreserveFlags (eval_VPsrldq count)\n\nlet eval_Pclmulqdq (imm:int) (src1 src2:quad32) : option quad32 =\n let Mkfour a0 a1 a2 a3 = src1 in\n let Mkfour b0 b1 b2 b3 = src2 in\n let f x0 x1 y0 y1 =\n let x = Vale.Math.Poly2.Bits_s.of_double32 (Mktwo x0 x1) in\n let y = Vale.Math.Poly2.Bits_s.of_double32 (Mktwo y0 y1) in\n Vale.Math.Poly2.Bits_s.to_quad32 (Vale.Math.Poly2_s.mul x y)\n in\n if pclmulqdq_enabled then\n match imm with\n | 0 -> Some (f a0 a1 b0 b1)\n | 1 -> Some (f a2 a3 b0 b1)\n | 16 -> Some (f a0 a1 b2 b3)\n | 17 -> Some (f a2 a3 b2 b3)\n | _ -> None\n else None\nval ins_Pclmulqdq (imm:int) : instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Pclmulqdq imm)\n\nPartial implementation:\nlet eval_VPclmulqdq (imm: int) (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_avx (eval_Pclmulqdq imm src1 src2)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: src1: Vale.X64.Machine_s.quad32 -> src2: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nPartial implementation:\nlet eval_Paddd_raw (src1 src2: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "Some\n(Mkfour ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: permutation: Vale.Def.Types_s.nat8 -> src: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = 
check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet eval_Pshufb_raw (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if is_full_byte_reversal_mask src2 then\n Some (reverse_bytes_quad32 src1)\n else if is_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else if is_high_dup_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else if is_lower_upper_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None\n\nlet eval_Pshufb (src1 src2:quad32) : option quad32 = check_ssse3 (eval_Pshufb_raw src1 src2)\nval ins_Pshufb : instr_dep [inOut opXmm] [opXmm] 
PreserveFlags eval_Pshufb\n\nlet eval_VPshufb (src1 src2:quad32) : option quad32 = check_avx (eval_Pshufb_raw src1 src2)\nval ins_VPshufb : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPshufb\n\nPartial implementation:\nlet eval_Pshufd (permutation: nat8) (src: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_sse2 (let bits:bits_of_byte = byte_to_twobits permutation in\n Some\n (Mkfour (select_word src bits.lo0)\n (select_word src bits.lo1)\n (select_word src bits.hi2)\n (select_word src bits.hi3)))"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: count: Vale.Def.Types_s.nat8 -> src: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] [op64] 
HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = 
check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet eval_Pshufb_raw (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if is_full_byte_reversal_mask src2 then\n Some (reverse_bytes_quad32 src1)\n else if is_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else if is_high_dup_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else if is_lower_upper_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None\n\nlet eval_Pshufb (src1 src2:quad32) : option quad32 = check_ssse3 (eval_Pshufb_raw src1 src2)\nval ins_Pshufb : instr_dep [inOut opXmm] [opXmm] 
PreserveFlags eval_Pshufb\n\nlet eval_VPshufb (src1 src2:quad32) : option quad32 = check_avx (eval_Pshufb_raw src1 src2)\nval ins_VPshufb : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPshufb\n\nlet eval_Pshufd (permutation:nat8) (src:quad32) : option quad32 = check_sse2 (\n let bits:bits_of_byte = byte_to_twobits permutation in\n Some (Mkfour\n (select_word src bits.lo0)\n (select_word src bits.lo1)\n (select_word src bits.hi2)\n (select_word src bits.hi3)))\nval ins_Pshufd (permutation:nat8) :\n instr_dep [out opXmm] [opXmm] PreserveFlags (eval_Pshufd permutation)\n\nlet eval_Pcmpeqd (dst src:quad32) : option quad32 = check_sse2 (\n let eq_result (b:bool):nat32 = if b then 0xFFFFFFFF else 0 in\n Some (Mkfour\n (eq_result (src.lo0 = dst.lo0))\n (eq_result (src.lo1 = dst.lo1))\n (eq_result (src.hi2 = dst.hi2))\n (eq_result (src.hi3 = dst.hi3))))\nval ins_Pcmpeqd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pcmpeqd\n\nlet eval_Pextrq (index:nat8) (src:quad32) : option nat64 = check_sse4_1 (\n let src_two = four_to_two_two src in\n Some (two_to_nat 32 (two_select src_two (index % 2))))\nval ins_Pextrq (index:nat8) : instr_dep [out op64] [opXmm] PreserveFlags (eval_Pextrq index)\n\nlet eval_Pinsrd (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat32 dst (src % pow2_32) (index % 4)))\nval ins_Pinsrd (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrd index)\n\nlet eval_Pinsrq (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat64_def dst src (index % 2)))\nval ins_Pinsrq (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrq index)\n\nlet eval_Pslldq_raw (count:nat8) (src:quad32) : option quad32 =\n // We only spec the two very special cases we need\n if count = 4 then Some (Mkfour 0 src.lo0 src.lo1 src.hi2)\n else if count = 8 then Some (Mkfour 0 0 src.lo0 src.lo1)\n\nPartial implementation:\nlet eval_VPslldq (count: nat8) (src: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "check_avx (eval_Pslldq_raw count src)"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: dst: Vale.X64.Machine_s.quad32 -> src: Vale.X64.Machine_s.quad32\n -> FStar.Pervasives.Native.option Vale.X64.Machine_s.quad32\nEffect: Prims.Tot\n\nContext:\nmodule Vale.X64.Instructions_s\nopen FStar.Mul\nopen Vale.Def.Words_s\nopen Vale.Def.Words.Two_s\nopen Vale.Def.Words.Four_s\nopen Vale.Def.Types_s\nopen Vale.X64.Machine_s\nopen Vale.X64.Instruction_s\nopen Vale.X64.CPU_Features_s\nopen Vale.X64.CryptoInstructions_s\nopen FStar.Seq.Base\n\nlet eval_Mov64 (src:nat64) : option nat64 = Some src\nval ins_Mov64 : instr_dep [out op64] [op64] PreserveFlags eval_Mov64\n\nlet eval_MovBe64 (src:nat64) : option nat64 = if movbe_enabled then Some (reverse_bytes_nat64 src) else None\nval ins_MovBe64 : instr_dep [out op64] [op64] PreserveFlags eval_MovBe64\n\nlet eval_Bswap64 (dst:nat64) : option nat64 = Some (reverse_bytes_nat64 dst)\nval ins_Bswap64 : instr_dep [inOut op64] [] PreserveFlags eval_Bswap64\n\nlet eval_Cmovc64 (dst src:nat64) (carry:bool) : option nat64 = Some (if carry then src else dst)\nval ins_Cmovc64 : instr_dep [inOut op64] [op64; opFlagsCf] PreserveFlags eval_Cmovc64\n\nlet eval_Add64 (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src in Some (sum >= pow2_64, sum % pow2_64)\nval ins_Add64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Add64\n\nlet eval_AddLea64 (src1 src2:nat64) : option nat64 = Some ((src1 + src2) % pow2_64)\nval ins_AddLea64 :instr_dep [out op64] [op64; op64] PreserveFlags eval_AddLea64\n\nlet eval_AddCarry64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_carry then 1 else 0) in\n Some (sum >= pow2_64, sum % pow2_64)\nval ins_AddCarry64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_AddCarry64\n\nlet eval_Adcx64_Adox64 (old_flag:bool) (dst src:nat64) : option (bool & nat64) =\n let sum = dst + src + (if old_flag then 1 else 0) in\n if adx_enabled then Some (sum >= pow2_64, sum % pow2_64) else None\nval ins_Adcx64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\nval ins_Adox64 : instr_dep [inOut opFlagsOf; inOut op64] [op64] PreserveFlags eval_Adcx64_Adox64\n\nlet eval_Sub64 (dst src:nat64) : option (bool & nat64) =\n let diff = dst - src in Some (diff < 0, diff % pow2_64)\nval ins_Sub64 : instr_dep [out opFlagsCf; inOut op64] [op64] HavocFlags eval_Sub64\n\nlet eval_Sbb64 (old_carry:bool) (dst src:nat64) : option (bool & nat64) =\n let diff = dst - (src + (if old_carry then 1 else 0)) in\n Some (diff < 0, diff % pow2_64) // We specify cf, but underspecify everything else (via HavocFlags)\nval ins_Sbb64 : instr_dep [inOut opFlagsCf; inOut op64] [op64] HavocFlags eval_Sbb64\n\nlet eval_Mul64 (rax src:nat64) : option (nat64 & nat64) =\n Some (FStar.UInt.mul_div #64 rax src, FStar.UInt.mul_mod #64 rax src)\nval ins_Mul64 : instr_dep [out (one64Reg rRdx); inOut (one64Reg rRax)] [op64] HavocFlags eval_Mul64\n\nlet eval_Mulx64 (rdx src:nat64) : option (nat64 & nat64) =\n let hi = FStar.UInt.mul_div #64 rdx src in\n let lo = FStar.UInt.mul_mod #64 rdx src in\n if bmi2_enabled then Some (hi, lo) else None\nval ins_Mulx64 : instr_dep [out op64; out op64] [one64Reg rRdx; op64] PreserveFlags eval_Mulx64\n\nlet eval_IMul64 (dst src:nat64) : option nat64 =\n Some (FStar.UInt.mul_mod #64 dst src)\nval ins_IMul64 : instr_dep [inOut op64] [op64] HavocFlags eval_IMul64\n\nlet eval_And64 (dst src:nat64) : option nat64 = Some (iand dst src)\nval ins_And64 : instr_dep [inOut op64] 
[op64] HavocFlags eval_And64\n\nlet eval_Xor64 (dst src:nat64) : option (nat64 & (bool & bool)) =\n Some (Vale.Def.Types_s.ixor dst src, (false, false))\nval ins_Xor64 : instr_dep [inOut op64; out opFlagsCf; out opFlagsOf] [op64] HavocFlags eval_Xor64\n\nlet eval_Shr64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishr dst amt) else None\nval ins_Shr64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shr64\n\nlet eval_Shl64 (dst amt:nat64) : option nat64 =\n if amt < 64 then Some (Vale.Def.Types_s.ishl dst amt) else None\nval ins_Shl64 : instr_dep [inOut op64] [op64] HavocFlags eval_Shl64\n\nlet eval_Cpuid (rax rcx:nat64) : option (nat64 & (nat64 & (nat64 & nat64))) =\n Some (cpuid rRax rax rcx, (cpuid rRbx rax rcx, (cpuid rRcx rax rcx, cpuid rRdx rax rcx)))\nval ins_Cpuid :\n instr_dep\n [inOut (one64Reg rRax); out (one64Reg rRbx); inOut (one64Reg rRcx); out (one64Reg rRdx)]\n [] PreserveFlags eval_Cpuid\n\n// The XGETBV instruction requires that OSXSAVE (in CPUID) is enabled.\n// We underspecify XGETBV here to only support fetching XCR0, which\n// is supported on any processor supporting the XGETBV instruction\nlet eval_Xgetbv (rcx:nat64) : option (nat64 & nat64) =\n if osxsave_enabled && rcx = 0 then Some (xgetbv rRax rcx, xgetbv rRdx rcx) else None\nval ins_Xgetbv :\n instr_dep\n [out (one64Reg rRax); out (one64Reg rRdx)]\n [one64Reg rRcx] PreserveFlags eval_Xgetbv\n\nlet check_avx (#a:Type0) (x:option a) : option a =\n if avx_enabled then x else None\n\nlet check_sse2 (#a:Type0) (x:option a) : option a =\n if sse2_enabled then x else None\n\nlet check_ssse3 (#a:Type0) (x:option a) : option a =\n if ssse3_enabled then x else None\n\nlet check_sse4_1 (#a:Type0) (x:option a) : option a =\n if sse4_1_enabled then x else None\n\nlet eval_Movdqu (src:quad32) : option quad32 = check_sse2 (Some src)\nval ins_Movdqu : instr_dep [out opXmm] [opXmm] PreserveFlags eval_Movdqu\n\nlet eval_Pxor (dst src:quad32) : option quad32 = check_sse2 (Some (quad32_xor dst src))\nval ins_Pxor : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pxor\n\nlet eval_VPxor (src1 src2:quad32) : option quad32 = check_avx (Some (quad32_xor src1 src2))\nval ins_VPxor : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPxor\n\nlet eval_Pand (dst src:quad32) : option quad32 = check_sse2 (Some (four_map2 (fun di si -> iand di si) dst src))\nval ins_Pand : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pand\n\nlet eval_Paddd_raw (src1 src2:quad32) : option quad32 =\n Some (Mkfour\n ((src1.lo0 + src2.lo0) % pow2_32)\n ((src1.lo1 + src2.lo1) % pow2_32)\n ((src1.hi2 + src2.hi2) % pow2_32)\n ((src1.hi3 + src2.hi3) % pow2_32))\n\nlet eval_Paddd (src1 src2:quad32) : option quad32 = check_sse2 (eval_Paddd_raw src1 src2)\nval ins_Paddd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Paddd\n\nlet eval_VPaddd (src1 src2:quad32) : option quad32 = check_avx (eval_Paddd_raw src1 src2)\nval ins_VPaddd : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPaddd\n\nlet eval_Pslld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishl i amt) dst) else None)\nval ins_Pslld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Pslld amt)\n\nlet eval_Psrld (amt:int) (dst:quad32) : option quad32 =\n check_sse2 (if 0 <= amt && amt < 32 then Some (four_map (fun i -> ishr i amt) dst) else None)\nval ins_Psrld (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrld amt)\n\nlet eval_Psrldq (amt:int) (dst:quad32) : option quad32 = 
check_sse2 (\n if 0 <= amt && amt < 16 then\n let src_bytes = le_quad32_to_bytes dst in\n let zero_pad = Seq.create amt 0 in\n let remaining_bytes = slice src_bytes amt (length src_bytes) in\n Some (le_bytes_to_quad32 (append zero_pad remaining_bytes))\n else None)\nval ins_Psrldq (amt:int) : instr_dep [inOut opXmm] [] PreserveFlags (eval_Psrldq amt)\n\nlet eval_Palignr_raw (amount:nat8) (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if amount = 4 then\n Some (Mkfour src2.lo1 src2.hi2 src2.hi3 src1.lo0)\n else if amount = 8 then\n Some (Mkfour src2.hi2 src2.hi3 src1.lo0 src1.lo1)\n else None\n\nlet eval_Palignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_ssse3 (eval_Palignr_raw amount src1 src2)\nval ins_Palignr (amount:nat8) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Palignr amount)\n\nlet eval_VPalignr (amount:nat8) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Palignr_raw amount src1 src2)\nval ins_VPalignr (amount:nat8) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPalignr amount)\n\nlet eval_Shufpd_raw (permutation:int) (src1 src2:quad32) : option quad32 =\n if 0 <= permutation && permutation < 4 then\n Some (Mkfour\n (if permutation % 2 = 0 then src1.lo0 else src1.hi2)\n (if permutation % 2 = 0 then src1.lo1 else src1.hi3)\n (if (permutation / 2) % 2 = 0 then src2.lo0 else src2.hi2)\n (if (permutation / 2) % 2 = 0 then src2.lo1 else src2.hi3))\n else None\n\nlet eval_Shufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_sse2 (eval_Shufpd_raw permutation src1 src2)\nval ins_Shufpd (permutation:int) :\n instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Shufpd permutation)\n\nlet eval_VShufpd (permutation:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Shufpd_raw permutation src1 src2)\nval ins_VShufpd (permutation:int) :\n instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VShufpd permutation)\n\nlet is_full_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x04050607 &&\n q.hi3 = 0x00010203\n\nlet is_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x00010203 &&\n q.lo1 = 0x04050607 &&\n q.hi2 = 0x08090A0B &&\n q.hi3 = 0x0C0D0E0F\n\nlet is_high_dup_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x0C0D0E0F &&\n q.lo1 = 0x08090A0B &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet is_lower_upper_byte_reversal_mask (q:quad32) : bool =\n q.lo0 = 0x04050607 &&\n q.lo1 = 0x00010203 &&\n q.hi2 = 0x0C0D0E0F &&\n q.hi3 = 0x08090A0B\n\nlet eval_Pshufb_raw (src1 src2:quad32) : option quad32 =\n // We only spec a restricted version sufficient for a handful of standard patterns\n if is_full_byte_reversal_mask src2 then\n Some (reverse_bytes_quad32 src1)\n else if is_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3))\n else if is_high_dup_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else if is_lower_upper_byte_reversal_mask src2 then\n Some (Mkfour\n (reverse_bytes_nat32 src1.lo1)\n (reverse_bytes_nat32 src1.lo0)\n (reverse_bytes_nat32 src1.hi3)\n (reverse_bytes_nat32 src1.hi2))\n else None\n\nlet eval_Pshufb (src1 src2:quad32) : option quad32 = check_ssse3 (eval_Pshufb_raw src1 src2)\nval ins_Pshufb : instr_dep [inOut opXmm] [opXmm] 
PreserveFlags eval_Pshufb\n\nlet eval_VPshufb (src1 src2:quad32) : option quad32 = check_avx (eval_Pshufb_raw src1 src2)\nval ins_VPshufb : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VPshufb\n\nlet eval_Pshufd (permutation:nat8) (src:quad32) : option quad32 = check_sse2 (\n let bits:bits_of_byte = byte_to_twobits permutation in\n Some (Mkfour\n (select_word src bits.lo0)\n (select_word src bits.lo1)\n (select_word src bits.hi2)\n (select_word src bits.hi3)))\nval ins_Pshufd (permutation:nat8) :\n instr_dep [out opXmm] [opXmm] PreserveFlags (eval_Pshufd permutation)\n\nlet eval_Pcmpeqd (dst src:quad32) : option quad32 = check_sse2 (\n let eq_result (b:bool):nat32 = if b then 0xFFFFFFFF else 0 in\n Some (Mkfour\n (eq_result (src.lo0 = dst.lo0))\n (eq_result (src.lo1 = dst.lo1))\n (eq_result (src.hi2 = dst.hi2))\n (eq_result (src.hi3 = dst.hi3))))\nval ins_Pcmpeqd : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_Pcmpeqd\n\nlet eval_Pextrq (index:nat8) (src:quad32) : option nat64 = check_sse4_1 (\n let src_two = four_to_two_two src in\n Some (two_to_nat 32 (two_select src_two (index % 2))))\nval ins_Pextrq (index:nat8) : instr_dep [out op64] [opXmm] PreserveFlags (eval_Pextrq index)\n\nlet eval_Pinsrd (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat32 dst (src % pow2_32) (index % 4)))\nval ins_Pinsrd (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrd index)\n\nlet eval_Pinsrq (index:nat8) (dst:quad32) (src:nat64) : option quad32 =\n check_sse4_1 (Some (insert_nat64_def dst src (index % 2)))\nval ins_Pinsrq (index:nat8) : instr_dep [inOut opXmm] [op64] PreserveFlags (eval_Pinsrq index)\n\nlet eval_Pslldq_raw (count:nat8) (src:quad32) : option quad32 =\n // We only spec the two very special cases we need\n if count = 4 then Some (Mkfour 0 src.lo0 src.lo1 src.hi2)\n else if count = 8 then Some (Mkfour 0 0 src.lo0 src.lo1)\n else None\nlet eval_VPslldq (count:nat8) (src:quad32) : option quad32 = check_avx (eval_Pslldq_raw count src)\nval ins_VPslldq (count:nat8) : instr_dep [out opXmm] [opXmm] PreserveFlags (eval_VPslldq count)\n\nlet eval_Psrldq_8_raw (count:nat8) (src:quad32) : option quad32 =\n // We only spec the one very special case we need\n if count = 8 then Some (Mkfour src.hi2 src.hi3 0 0)\n else None\nlet eval_VPsrldq (count:nat8) (src:quad32) : option quad32 = check_avx (eval_Psrldq_8_raw count src)\nval ins_VPsrldq (count:nat8) : instr_dep [out opXmm] [opXmm] PreserveFlags (eval_VPsrldq count)\n\nlet eval_Pclmulqdq (imm:int) (src1 src2:quad32) : option quad32 =\n let Mkfour a0 a1 a2 a3 = src1 in\n let Mkfour b0 b1 b2 b3 = src2 in\n let f x0 x1 y0 y1 =\n let x = Vale.Math.Poly2.Bits_s.of_double32 (Mktwo x0 x1) in\n let y = Vale.Math.Poly2.Bits_s.of_double32 (Mktwo y0 y1) in\n Vale.Math.Poly2.Bits_s.to_quad32 (Vale.Math.Poly2_s.mul x y)\n in\n if pclmulqdq_enabled then\n match imm with\n | 0 -> Some (f a0 a1 b0 b1)\n | 1 -> Some (f a2 a3 b0 b1)\n | 16 -> Some (f a0 a1 b2 b3)\n | 17 -> Some (f a2 a3 b2 b3)\n | _ -> None\n else None\nval ins_Pclmulqdq (imm:int) : instr_dep [inOut opXmm] [opXmm] PreserveFlags (eval_Pclmulqdq imm)\n\nlet eval_VPclmulqdq (imm:int) (src1 src2:quad32) : option quad32 =\n check_avx (eval_Pclmulqdq imm src1 src2)\nval ins_VPclmulqdq (imm:int) : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags (eval_VPclmulqdq imm)\n\nlet eval_AESNI_enc (src1 src2:quad32) : option quad32 =\n if aesni_enabled then\n Some (quad32_xor (Vale.AES.AES_s.mix_columns_LE (Vale.AES.AES_s.sub_bytes 
(Vale.AES.AES_s.shift_rows_LE src1))) src2)\n else None\nval ins_AESNI_enc : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_AESNI_enc\n\nlet eval_VAESNI_enc (src1 src2:quad32) : option quad32 = check_avx (eval_AESNI_enc src1 src2)\nval ins_VAESNI_enc : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VAESNI_enc\n\nlet eval_AESNI_enc_last (src1 src2:quad32) : option quad32 =\n if aesni_enabled then\n Some (quad32_xor (Vale.AES.AES_s.sub_bytes (Vale.AES.AES_s.shift_rows_LE src1)) src2)\n else None\nval ins_AESNI_enc_last : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_AESNI_enc_last\n\nlet eval_VAESNI_enc_last (src1 src2:quad32) : option quad32 = check_avx (eval_AESNI_enc_last src1 src2)\nval ins_VAESNI_enc_last : instr_dep [out opXmm] [opXmm; opXmm] PreserveFlags eval_VAESNI_enc_last\n\nlet eval_AESNI_dec (dst src:quad32) : option quad32 =\n if aesni_enabled then\n Some (quad32_xor (Vale.AES.AES_s.inv_mix_columns_LE (Vale.AES.AES_s.inv_sub_bytes (Vale.AES.AES_s.inv_shift_rows_LE dst))) src)\n else None\nval ins_AESNI_dec : instr_dep [inOut opXmm] [opXmm] PreserveFlags eval_AESNI_dec\n\nPartial implementation:\nlet eval_AESNI_dec_last (dst src: quad32) : option quad32 =\n \n"
},
{
"role": "assistant",
"content": "if aesni_enabled\nthen Some (quad32_xor (Vale.AES.AES_s.inv_sub_bytes (Vale.AES.AES_s.inv_shift_rows_LE dst)) src)\nelse None"
}
] |
[
{
"role": "user",
"content": "Complete this F* function with a correct proof.\n\nType: \n d: Pulse.Typing.st_typing g t c ->\n pre: Pulse.Syntax.Base.term ->\n veq: Pulse.Typing.vprop_equiv g (Pulse.Syntax.Base.comp_pre c) pre\n -> Pulse.Typing.st_typing g t (Pulse.Checker.Base.comp_with_pre c pre)\nEffect: Prims.Tot\n\nContext:\n(*\n Copyright 2023 Microsoft Research\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*)\n\nmodule Pulse.Checker.Base\n\nmodule T = FStar.Tactics.V2\nmodule RT = FStar.Reflection.Typing\nmodule Metatheory = Pulse.Typing.Metatheory\nmodule CP = Pulse.Checker.Pure\nmodule RU = Pulse.RuntimeUtils\nmodule FV = Pulse.Typing.FV\nmodule P = Pulse.Syntax.Printer\n\nopen Pulse.Typing.Combinators\nopen Pulse.Typing.Metatheory\n\nlet debug (g:env) (f: unit -> T.Tac string) : T.Tac unit =\n if RU.debug_at_level (fstar_env g) \"pulse.checker\" then\n T.print (f())\n\nlet format_failed_goal (g:env) (ctxt:list term) (goal:list term) =\n let terms_to_strings (ts:list term)= T.map Pulse.Syntax.Printer.term_to_string ts in\n let numbered_list ss = \n let _, s = T.fold_left (fun (i, acc) s -> (i+1, Printf.sprintf \"%d. %s\" i s :: acc)) (1, []) ss in\n String.concat \"\\n \" (List.rev s)\n in\n let format_terms (ts:list term) = numbered_list (terms_to_strings ts) in\n Printf.sprintf \n \"Failed to prove the following goals:\\n \\\n %s\\n\\\n The remaining conjuncts in the separation logic context available for use are:\\n \\\n %s\\n\\\n The typing context is:\\n \\\n %s\\n\"\n (format_terms goal)\n (format_terms ctxt)\n (env_to_string g)\n\n\nlet mk_arrow ty t = RT.mk_arrow (elab_term ty) T.Q_Explicit (elab_term t)\nlet mk_abs ty t = RT.(mk_abs (elab_term ty) T.Q_Explicit (elab_term t))\n\nlet intro_comp_typing (g:env) \n (c:comp_st)\n (pre_typing:tot_typing g (comp_pre c) tm_vprop)\n (i_typing:effect_annot_typing g (effect_annot_of_comp c))\n (res_typing:universe_of g (comp_res c) (comp_u c))\n (x:var { fresh_wrt x g (freevars (comp_post c)) })\n (post_typing:tot_typing (push_binding g x ppname_default (comp_res c)) (open_term (comp_post c) x) tm_vprop)\n : T.Tac (comp_typing g c (universe_of_comp c))\n = let intro_st_comp_typing (st:st_comp { comp_u c == st.u /\\\n comp_pre c == st.pre /\\\n comp_res c == st.res /\\\n comp_post c == st.post } )\n : T.Tac (st_comp_typing g st)\n = STC g st x res_typing pre_typing post_typing\n in\n match c with\n | C_ST st -> \n let stc = intro_st_comp_typing st in\n CT_ST _ _ stc\n | C_STAtomic i obs st -> \n let stc = intro_st_comp_typing st in\n CT_STAtomic _ i obs _ i_typing stc\n | C_STGhost st -> \n let stc = intro_st_comp_typing st in\n CT_STGhost _ _ stc\n\nirreducible\nlet post_typing_as_abstraction\n (#g:env) (#x:var) (#ty:term) (#t:term { fresh_wrt x g (freevars t) })\n (_:tot_typing (push_binding g x ppname_default ty) (open_term t x) tm_vprop)\n : FStar.Ghost.erased (RT.tot_typing (elab_env g) (mk_abs ty t) (mk_arrow ty tm_vprop)) \n = admit()\n\nlet check_effect_annot (g:env) (e:effect_annot) : T.Tac (effect_annot_typing g e) =\n match e with\n | 
EffectAnnotSTT\n | EffectAnnotGhost -> ()\n | EffectAnnotAtomic { opens } ->\n CP.core_check_term g opens T.E_Total tm_inames\n\nlet intro_post_hint g effect_annot ret_ty_opt post =\n let x = fresh g in\n let ret_ty = \n match ret_ty_opt with\n | None -> tm_fstar RT.unit_ty FStar.Range.range_0\n | Some t -> t\n in\n let ret_ty, _ = CP.instantiate_term_implicits g ret_ty in\n let (| u, ty_typing |) = CP.check_universe g ret_ty in\n let (| post, post_typing |) = CP.check_vprop (push_binding g x ppname_default ret_ty) (open_term_nv post (v_as_nv x)) in \n let post' = close_term post x in\n Pulse.Typing.FV.freevars_close_term post x 0;\n let effect_annot_typing = check_effect_annot g effect_annot in\n assume (open_term post' x == post);\n { g;\n effect_annot;\n effect_annot_typing;\n ret_ty; u; ty_typing;\n post=post';\n x; post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction #_ #_ #_ #post' post_typing }\n\n\nlet comp_typing_as_effect_annot_typing (#g:env) (#c:comp_st) (ct:comp_typing_u g c)\n: effect_annot_typing g (effect_annot_of_comp c)\n= let _, iname_typing = Metatheory.comp_typing_inversion ct in\n match c with\n | C_ST _ -> ()\n | C_STGhost _ -> ()\n | C_STAtomic opens obs _ -> iname_typing\n \n\nlet post_hint_from_comp_typing #g #c ct = \n let st_comp_typing, _ = Metatheory.comp_typing_inversion ct in\n let (| ty_typing, pre_typing, x, post_typing |) = Metatheory.st_comp_typing_inversion st_comp_typing in\n let effect_annot_typing = comp_typing_as_effect_annot_typing ct in\n let p : post_hint_t = \n { g;\n effect_annot=_;\n effect_annot_typing;\n ret_ty = comp_res c; u=comp_u c; \n ty_typing=ty_typing;\n post=comp_post c;\n x;\n post_typing_src=post_typing;\n post_typing=post_typing_as_abstraction post_typing }\n in\n p\n\nlet comp_typing_from_post_hint\n (#g: env)\n (c: comp_st)\n (pre_typing: tot_typing g (comp_pre c) tm_vprop)\n (p:post_hint_for_env g { comp_post_matches_hint c (Some p) })\n: T.Tac (comp_typing_u g c)\n= let x = fresh g in\n if x `Set.mem` freevars p.post //exclude this\n then fail g None \"Impossible: unexpected freevar in post, please file a bug-report\"\n else let post_typing = post_hint_typing g p x in\n intro_comp_typing g c pre_typing\n post_typing.effect_annot_typing\n post_typing.ty_typing \n x post_typing.post_typing\n\n\nlet extend_post_hint g p x tx conjunct conjunct_typing =\n let g' = push_binding g x ppname_default tx in\n let y = fresh g' in\n let g'' = push_binding g' y ppname_default p.ret_ty in\n let p_post_typing_src\n : tot_typing (push_binding p.g p.x ppname_default p.ret_ty)\n (open_term p.post p.x) tm_vprop\n = p.post_typing_src\n in\n let p_post_typing_src''\n : tot_typing g'' (open_term p.post y) tm_vprop\n = RU.magic () //weaken, rename\n in\n let conjunct_typing'\n : tot_typing g' conjunct tm_vprop\n = conjunct_typing\n in\n let conjunct_typing''\n : tot_typing g'' (open_term conjunct y) tm_vprop\n = RU.magic () //weaken\n in\n let new_post = tm_star p.post conjunct in\n let new_post_typing\n : tot_typing g'' (open_term new_post y) tm_vprop\n = Pulse.Typing.star_typing p_post_typing_src'' conjunct_typing''\n in\n assume (fresh_wrt y g'' (freevars new_post));\n let new_post_abs_typing\n : Ghost.erased (RT.tot_typing (elab_env g'') (mk_abs p.ret_ty new_post) (mk_arrow p.ret_ty tm_vprop))\n = post_typing_as_abstraction new_post_typing\n in\n { p with\n g=g';\n post=new_post;\n x=y;\n post_typing_src=new_post_typing;\n post_typing=new_post_abs_typing }\n\nlet k_elab_unit (g:env) (ctxt:term)\n : continuation_elaborator g ctxt 
g ctxt\n = fun p r -> r\n\nlet k_elab_trans\n (#g0:env) (#g1:env { g1 `env_extends` g0 }) (#g2:env { g2 `env_extends` g1 }) (#ctxt0 #ctxt1 #ctxt2:term)\n (k0:continuation_elaborator g0 ctxt0 g1 ctxt1)\n (k1:continuation_elaborator g1 ctxt1 g2 ctxt2 { g1 `env_extends` g0})\n : continuation_elaborator g0 ctxt0 g2 ctxt2\n = fun post_hint res -> k0 post_hint (k1 post_hint res)\n\nlet comp_st_with_post (c:comp_st) (post:term)\n : c':comp_st { st_comp_of_comp c' == ({ st_comp_of_comp c with post} <: st_comp) } =\n match c with\n | C_ST st -> C_ST { st with post }\n | C_STGhost st -> C_STGhost { st with post }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with post}\n\nlet ve_unit_r g (p:term) : vprop_equiv g (tm_star p tm_emp) p = \n VE_Trans _ _ _ _ (VE_Comm _ _ _) (VE_Unit _ _)\n\nlet st_equiv_trans (#g:env) (#c0 #c1 #c2:comp) (d01:st_equiv g c0 c1) (d12:st_equiv g c1 c2)\n : option (st_equiv g c0 c2)\n = \n match d01 with\n | ST_VPropEquiv _f _c0 _c1 x c0_pre_typing c0_res_typing c0_post_typing eq_res_01 eq_pre_01 eq_post_01 -> (\n let ST_VPropEquiv _f _c1 _c2 y c1_pre_typing c1_res_typing c1_post_typing eq_res_12 eq_pre_12 eq_post_12 = d12 in\n if x = y && eq_tm (comp_res c0) (comp_res c1)\n then Some (\n ST_VPropEquiv g c0 c2 x c0_pre_typing c0_res_typing c0_post_typing\n (RT.Rel_trans _ _ _ _ _ eq_res_01 eq_res_12)\n (VE_Trans _ _ _ _ eq_pre_01 eq_pre_12)\n (VE_Trans _ _ _ _ eq_post_01 eq_post_12)\n )\n else None\n )\n | ST_TotEquiv g t1 t2 u typing eq ->\n let ST_TotEquiv _g _t1 t3 _ _ eq' = d12 in\n let eq'' = Ghost.hide (RT.Rel_trans _ _ _ _ _ eq eq') in\n Some (ST_TotEquiv g t1 t3 u typing eq'')\n\nlet t_equiv #g #st #c (d:st_typing g st c) (#c':comp) (eq:st_equiv g c c')\n : st_typing g st c'\n = match d with\n | T_Equiv _ _ _ _ d0 eq' -> (\n match st_equiv_trans eq' eq with\n | None -> T_Equiv _ _ _ _ d eq\n | Some eq'' -> T_Equiv _ _ _ _ d0 eq''\n )\n | _ -> T_Equiv _ _ _ _ d eq\n\nlet st_equiv_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { freevars post `Set.subset` freevars (comp_post c)})\n (veq: (x:var { fresh_wrt x g (freevars (comp_post c)) } ->\n vprop_equiv (push_binding g x ppname_default (comp_res c)) \n (open_term (comp_post c) x)\n (open_term post x)))\n : st_typing g t (comp_st_with_post c post)\n = if eq_tm post (comp_post c) then d\n else\n let c' = comp_st_with_post c post in\n let (| u_of, pre_typing, x, post_typing |) = Metatheory.(st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))) in\n let veq = veq x in\n let st_equiv : st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) (VE_Refl _ _) veq\n in\n t_equiv d st_equiv\n\nlet simplify_post (#g:env) (#t:st_term) (#c:comp_st) (d:st_typing g t c)\n (post:term { comp_post c == tm_star post tm_emp})\n : st_typing g t (comp_st_with_post c post)\n = st_equiv_post d post (fun x -> ve_unit_r (push_binding g x ppname_default (comp_res c)) (open_term post x))\n\nlet simplify_lemma (c:comp_st) (c':comp_st) (post_hint:option post_hint_t)\n : Lemma\n (requires\n comp_post_matches_hint c post_hint /\\\n effect_annot_of_comp c == effect_annot_of_comp c' /\\\n comp_res c' == comp_res c /\\\n comp_u c' == comp_u c /\\\n comp_post c' == tm_star (comp_post c) tm_emp)\n (ensures comp_post_matches_hint (comp_st_with_post c' (comp_post c)) post_hint /\\\n comp_pre (comp_st_with_post c' (comp_post c)) == comp_pre c')\n = ()\n\nlet vprop_equiv_typing_bk (#g:env) (#ctxt:_) (ctxt_typing:tot_typing g ctxt tm_vprop)\n (#p:_) (d:vprop_equiv g p 
ctxt)\n : tot_typing g p tm_vprop \n = let _, bk = vprop_equiv_typing d in\n bk ctxt_typing\n\nlet comp_with_pre (c:comp_st) (pre:term) =\n match c with\n | C_ST st -> C_ST { st with pre }\n | C_STGhost st -> C_STGhost { st with pre }\n | C_STAtomic i obs st -> C_STAtomic i obs {st with pre}\n\nPartial implementation:\nlet st_equiv_pre\n (#g: env)\n (#t: st_term)\n (#c: comp_st)\n (d: st_typing g t c)\n (pre: term)\n (veq: vprop_equiv g (comp_pre c) pre)\n : st_typing g t (comp_with_pre c pre) =\n \n"
},
{
"role": "assistant",
"content": "if eq_tm pre (comp_pre c)\nthen d\nelse\n let c' = comp_with_pre c pre in\n let (| u_of , pre_typing , x , post_typing |) =\n let open Metatheory in\n st_comp_typing_inversion (fst (comp_typing_inversion (st_typing_correctness d)))\n in\n let st_equiv:st_equiv g c c' =\n ST_VPropEquiv g c c' x pre_typing u_of post_typing (RT.Rel_refl _ _ _) veq (VE_Refl _ _)\n in\n t_equiv d st_equiv"
}
] |